Implement common config + image download

commit 64adcf3647
parent 467cce9ac3
etc/spoc/spoc.conf (new file, 14 lines)
@@ -0,0 +1,14 @@
+[general]
+data-dir = /var/lib/spoc
+log-dir = /var/log/spoc
+network-interface = spocbr0
+
+[publish]
+publish-dir = /srv/build/spoc
+signing-key = /etc/spoc/publish.key
+
+[repo]
+url = https://repo.spotter.cz/spoc
+username =
+password =
+public-key = MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEWJXH4Qm0kt2L86sntQH+C1zOJNQ0qMRt0vx4krTxRs9HQTQYAy//JC92ea2aKleA8OL0JF90b1NYXcQCWdAS+vE/ng9IEAii8C2+5nfuFeZ5YUjbQhfFblwHSM0c7hEG
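The public-key value is a single-line base64 DER (SubjectPublicKeyInfo) blob. As a minimal sketch of how such a value can be wrapped into PEM and loaded with the cryptography package, which is how the new config.py below consumes it; the shortened key string here is illustrative only:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.serialization import load_pem_public_key

    b64_key = 'MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEWJXH...'  # illustrative, truncated
    pem = f'-----BEGIN PUBLIC KEY-----\n{b64_key}\n-----END PUBLIC KEY-----'.encode()
    public_key = load_pem_public_key(pem, default_backend())  # an EC P-384 key in this case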
@@ -8,7 +8,7 @@ import shlex
 from spoc import repo_local
 from spoc.container import Container, STATE_RUNNING, STATE_STOPPED
 from spoc.image import Image
-from spoc.paths import VOLUME_DIR
+from spoc.config import VOLUME_DIR
 
 ACTION_LIST = 1
 ACTION_CREATE = 2
@@ -17,9 +17,9 @@ ACTION_BUILD = 4
 ACTION_PUBLISH = 5
 ACTION_UNPUBLISH = 6
 
-def get_image_name(filepath):
+def get_image_name(file_path):
     # Read and return image name from image file
-    with open(filepath) as f:
+    with open(file_path) as f:
         for line in f:
             if line.startswith('IMAGE '):
                 return line.split()[1]
@@ -36,7 +36,7 @@ def listing(repo_type):
         print(image)
 
 def download(image_name):
-    raise NotImplementedException() # TODO
+    Image(image_name, False).download()
 
 def delete(image_name):
     Image(image_name, False).delete()
usr/lib/python3.8/spoc/config.py (new file, 46 lines)
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+
+import configparser
+import os
+import urllib.parse
+
+config = configparser.ConfigParser()
+config.read('/etc/spoc/spoc.conf')
+print('CONFIG LOADED') # TODO: Debug, remove
+
+def get_repo_auth(config):
+    username = config.get('repo', 'username', fallback='')
+    password = config.get('repo', 'password', fallback='')
+    if not username and not password:
+        return None
+    return (username, password)
+
+def get_repo_pubkey(config):
+    pubkey = config.get('repo', 'public-key', fallback='')
+    pubkey = f'-----BEGIN PUBLIC KEY-----\n{pubkey}\n-----END PUBLIC KEY-----'
+    return pubkey.encode()
+
+NETWORK_INTERFACE = config.get('general', 'network-interface', fallback='spocbr0')
+
+DATA_DIR = config.get('general', 'data-dir', fallback='/var/lib/spoc')
+APPS_DIR = os.path.join(DATA_DIR, 'apps')
+CONTAINERS_DIR = os.path.join(DATA_DIR, 'containers')
+LAYERS_DIR = os.path.join(DATA_DIR, 'layers')
+VOLUME_DIR = os.path.join(DATA_DIR, 'volumes')
+HOSTS_FILE = os.path.join(DATA_DIR, 'hosts')
+REPO_FILE = os.path.join(DATA_DIR, 'packages.json')
+LOG_DIR = config.get('general', 'log-dir', fallback='/var/log/spoc')
+LOCK_FILE = '/run/lock/spoc.lock'
+
+PUB_DIR = config.get('publish', 'publish-dir', fallback=os.path.join(DATA_DIR, 'publish'))
+PUB_LAYERS_DIR = os.path.join(PUB_DIR, 'layers')
+PUB_APPS_DIR = os.path.join(PUB_DIR, 'apps')
+PUB_PACKAGES_FILE = os.path.join(PUB_DIR, 'packages.json')
+PUB_SIG_FILE = os.path.join(PUB_DIR, 'packages.sig')
+PUB_PRIVKEY_FILE = config.get('publish', 'signing-key', fallback='/etc/spoc/publish.key')
+
+REPO_URL = config.get('repo', 'url', fallback='https://localhost')
+REPO_PACKAGES_URL = urllib.parse.urljoin(REPO_URL, 'packages.json')
+REPO_SIG_URL = urllib.parse.urljoin(REPO_URL, 'packages.sig')
+REPO_AUTH = get_repo_auth(config)
+REPO_PUBKEY = get_repo_pubkey(config)
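A sketch of how the module-level constants are meant to be consumed elsewhere in the package; the import path assumes the usr/lib/python3.8 layout introduced by this commit:

    import requests

    from spoc.config import REPO_AUTH, REPO_PACKAGES_URL

    with requests.Session() as session:
        # REPO_AUTH is None when no credentials are configured, which disables
        # HTTP basic auth; otherwise it is a (username, password) tuple
        session.auth = REPO_AUTH
        response = session.get(REPO_PACKAGES_URL, timeout=5)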
usr/lib/python3.8/spoc/container.py
@@ -12,7 +12,7 @@ from . import depsolver
 from . import network
 from . import repo_local
 from .exceptions import InvalidContainerStateError
-from .paths import CONTAINERS_DIR, LAYERS_DIR, LOG_DIR, HOSTS_FILE, VOLUME_DIR
+from .config import CONTAINERS_DIR, LAYERS_DIR, LOG_DIR, HOSTS_FILE, VOLUME_DIR
 from .templates import LXC_CONTAINER_TEMPLATE
 
 # States taken from https://github.com/lxc/lxc/blob/master/src/lxc/state.h
usr/lib/python3.8/spoc/image.py
@@ -3,11 +3,12 @@
 import os
 import shutil
 import tarfile
+import urllib.parse
 
 from . import repo_local
+from . import repo_online
 from . import repo_publish
-from . import utils
-from .paths import LAYERS_DIR, PUB_LAYERS_DIR
+from .config import LAYERS_DIR, PUB_LAYERS_DIR, REPO_URL
 
 
 DEFINITION_MEMBERS = {'layers', 'env', 'uid', 'gid', 'cmd', 'cwd', 'ready', 'halt', 'size', 'dlsize', 'hash'}
@@ -16,6 +17,7 @@ class Image:
         self.name = name
         self.layer_path = os.path.join(LAYERS_DIR, name)
         self.archive_path = os.path.join(PUB_LAYERS_DIR, f'{name}.tar.xz')
+        self.online_path = urllib.parse.urljoin(REPO_URL, f'images/{name}.tar.xz')
         self.layers = [name]
         self.env = {}
         self.uid = None
@@ -58,13 +60,13 @@ class Image:
             pass
 
     def publish(self):
-        ctr = utils.TarSizeCounter()
         os.makedirs(PUB_LAYERS_DIR, 0o755, True)
+        files = repo_publish.TarSizeCounter()
         with tarfile.open(self.archive_path, 'w:xz') as tar:
-            tar.add(self.layer_path, self.name, filter=ctr.add_file)
-        self.size = ctr.size
+            tar.add(self.layer_path, self.name, filter=files.add_file)
+        self.size = files.size
         self.dlsize = os.path.getsize(self.archive_path)
-        self.hash = utils.hash_file(self.archive_path)
+        self.hash = repo_publish.sign_file(self.archive_path).hex()
         repo_publish.register_image(self.name, self.get_definition())
 
     def unpublish(self):
@@ -73,3 +75,9 @@ class Image:
             os.unlink(self.archive_path)
         except FileNotFoundError:
             pass
+
+    def download(self):
+        definition = repo_online.get_image(self.name)
+        repo_online.download_archive(self.online_path, self.layer_path, definition['hash'])
+        self.set_definition(definition)
+        repo_local.register_image(self.name, definition)
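The definition dict exchanged by get_definition()/set_definition() is restricted to DEFINITION_MEMBERS. A hedged sketch of its shape with invented values; note that after this commit the hash field actually holds the hex ECDSA signature produced by repo_publish.sign_file(), not a plain digest:

    definition = {
        'layers': ['example'],   # hypothetical layer name
        'size': 5242880,         # uncompressed size tallied by TarSizeCounter
        'dlsize': 2097152,       # size of the published .tar.xz archive
        'hash': 'a1b2c3...',     # hex ECDSA signature of the archive (illustrative)
    }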
@@ -10,7 +10,7 @@ import zipfile
 
 from .container import Container
 from .image import Image
-from .paths import VOLUME_DIR
+from .config import VOLUME_DIR
 
 class ImageBuilder:
     def build(self, image, filename):
@@ -114,9 +114,60 @@ class ImageBuilder:
         # Copy files from the host or download them from a http(s) URL
         dst = os.path.join(self.image.layer_path, dst.lstrip('/'))
         if src.startswith('http://') or src.startswith('https://'):
-            utils.unpack_http_archive(src, dst)
+            unpack_http_archive(src, dst)
         else:
             src = os.path.join(os.path.dirname(self.filename), src)
-            utils.copy_tree(src, dst)
+            copy_tree(src, dst)
         # Shift UID/GID of the files to the unprivileged range
-        utils.shift_uid(dst, os.stat(dst, follow_symlinks=False))
+        shift_uid(dst, os.stat(dst, follow_symlinks=False))
+
+def unpack_http_archive(src, dst):
+    # Decompress an archive downloaded via http(s)
+    with tempfile.TemporaryFile() as tmp_archive:
+        # Download the file via http(s) and store as temporary file
+        with requests.Session() as session:
+            resource = session.get(src, stream=True)
+            for chunk in resource.iter_content(chunk_size=None):
+                if chunk:
+                    tmp_archive.write(chunk)
+        # Check the magic bytes and determine if the file is zip
+        tmp_archive.seek(0)
+        is_zip = zipfile.is_zipfile(tmp_archive)
+        # Extract the file. If it is not zip, assume tar (bzip2, gzip or xz)
+        tmp_archive.seek(0)
+        if is_zip:
+            with zipfile.ZipFile(tmp_archive) as zip:
+                zip.extractall(dst)
+        else:
+            with tarfile.open(fileobj=tmp_archive) as tar:
+                tar.extractall(dst, numeric_owner=True)
+
+def copy_tree(src, dst):
+    # TODO: shutil.copytree?
+    # Copies files from the host
+    if not os.path.isdir(src):
+        shutil.copy2(src, dst)
+    else:
+        os.makedirs(dst, exist_ok=True)
+        for name in os.listdir(src):
+            copy_tree(os.path.join(src, name), os.path.join(dst, name))
+        shutil.copystat(src, dst)
+
+def shift_uid(path, path_stat):
+    # Shifts UID/GID of a file or a directory and its contents to the unprivileged range
+    # The function parameters could arguably be more friendly, but os.scandir() already calls stat() on the entries,
+    # so it would be wasteful to not reuse them for considerable performance gain
+    uid = path_stat.st_uid
+    gid = path_stat.st_gid
+    do_chown = False
+    if uid < 100000:
+        uid = uid + 100000
+        do_chown = True
+    if gid < 100000:
+        gid = gid + 100000
+        do_chown = True
+    if do_chown:
+        os.chown(path, uid, gid, follow_symlinks=False)
+    if stat.S_ISDIR(path_stat.st_mode):
+        for entry in os.scandir(path):
+            shift_uid(entry.path, entry.stat(follow_symlinks=False))
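For orientation, shift_uid() offsets ownership into the 100000+ range used by unprivileged containers: host uid 0 maps to 100000, uid 1000 to 101000, and ids already at or above 100000 are left untouched. A usage sketch; the module name and layer path are assumptions:

    import os

    from spoc.imagebuilder import shift_uid  # module name assumed

    layer = '/var/lib/spoc/layers/example'   # hypothetical layer directory
    shift_uid(layer, os.stat(layer, follow_symlinks=False))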
usr/lib/python3.8/spoc/network.py
@@ -5,9 +5,8 @@ import ipaddress
 import socket
 import struct
 
-from .paths import HOSTS_FILE
+from .config import HOSTS_FILE, NETWORK_INTERFACE
 
-INTERFACE_NAME = 'spocbr0'
 # ioctl magic constants taken from https://git.musl-libc.org/cgit/musl/tree/include/sys/ioctl.h (same as glibc)
 IOCTL_SIOCGIFADDR = 0x8915
 IOCTL_SIOCGIFNETMASK = 0x891b
@@ -32,7 +31,7 @@ def get_bridge_interface():
     # Returns bridge interface's IP address and netmask
     with socket.socket(socket.AF_INET) as sock:
         # Get IPv4Interface for given interface name
-        packed_ifname = struct.pack('256s', INTERFACE_NAME.encode())
+        packed_ifname = struct.pack('256s', NETWORK_INTERFACE.encode())
         ip = socket.inet_ntoa(fcntl.ioctl(sock.fileno(), IOCTL_SIOCGIFADDR, packed_ifname)[20:24])
         netmask = socket.inet_ntoa(fcntl.ioctl(sock.fileno(), IOCTL_SIOCGIFNETMASK, packed_ifname)[20:24])
        return ipaddress.IPv4Interface(f'{ip}/{netmask}')
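get_bridge_interface() returns an ipaddress.IPv4Interface built from the ioctl results. A small sketch of what that object provides; the address is illustrative:

    import ipaddress

    iface = ipaddress.IPv4Interface('10.0.3.1/255.255.255.0')  # e.g. a bridge address
    print(iface.ip)       # 10.0.3.1
    print(iface.network)  # 10.0.3.0/24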
usr/lib/python3.8/spoc/paths.py (deleted, 19 lines)
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-
-ROOT_DIR = '/var/lib/spoc'
-CONTAINERS_DIR = '/var/lib/spoc/containers'
-LAYERS_DIR = '/var/lib/spoc/layers'
-VOLUME_DIR = '/var/lib/spoc/volumes'
-HOSTS_FILE = '/var/lib/spoc/hosts'
-REPO_FILE = '/var/lib/spoc/repository.json'
-REPO_LOCK = '/run/lock/spoc-repository.lock'
-
-LOG_DIR = '/var/log/spoc'
-
-PUB_ROOT_DIR = '/srv/build/spoc'
-PUB_LAYERS_DIR = '/srv/build/spoc/layers'
-PUB_APPS_DIR = '/srv/build/spoc/apps'
-PUB_REPO_FILE = '/srv/build/spoc/repository.json'
-PUB_SIG_FILE = '/srv/build/spoc/repository.sig'
-PUB_REPO_LOCK = '/run/lock/spoc-publish.lock'
-PUB_PRIVATE_KEY = '/etc/spoc/publish.key'
usr/lib/python3.8/spoc/repo_local.py
@@ -4,7 +4,7 @@ import json
 
 from .exceptions import AppNotFoundError, ContainerNotFoundError, ImageNotFoundError
 from .flock import lock_ex
-from .paths import REPO_FILE, REPO_LOCK
+from .config import REPO_FILE, LOCK_FILE
 
 TYPE_APP = 'apps'
 TYPE_CONTAINER = 'containers'
@@ -22,7 +22,7 @@ def save(data):
         json.dump(data, f, sort_keys=True, indent=4)
 
 def get_entries(entry_type):
-    with lock_ex(REPO_LOCK):
+    with lock_ex(LOCK_FILE):
         data = load()
     return data[entry_type]
 
@@ -30,13 +30,13 @@ def get_entry(entry_type, name):
     return get_entries(entry_type)[name]
 
 def add_entry(entry_type, name, definition):
-    with lock_ex(REPO_LOCK):
+    with lock_ex(LOCK_FILE):
         data = load()
         data[entry_type][name] = definition
         save(data)
 
 def delete_entry(entry_type, name):
-    with lock_ex(REPO_LOCK):
+    with lock_ex(LOCK_FILE):
         data = load()
         try:
             del data[entry_type][name]
usr/lib/python3.8/spoc/repo_online.py
@@ -1,16 +1,51 @@
 # -*- coding: utf-8 -*-
 
+import hashlib
 import json
 import requests
+import tarfile
+import tempfile
+
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric import ec, utils
+from cryptography.hazmat.primitives.serialization import load_pem_public_key
 
-from . import utils
 from .exceptions import AppNotFoundError, ImageNotFoundError
+from .config import REPO_AUTH, REPO_PUBKEY, REPO_PACKAGES_URL, REPO_SIG_URL
 
 TYPE_APP = 'apps'
 TYPE_IMAGE = 'images'
 
+def download_archive(src, dst, expected_hash):
+    # Download archive via http(s), verify its signature and decompress
+    with tempfile.TemporaryFile() as tmp_archive:
+        sha512 = hashes.SHA512()
+        hasher = hashes.Hash(sha512, default_backend())
+        # Download the file via http(s) and store as temporary file
+        with requests.Session() as session:
+            session.auth = REPO_AUTH
+            resource = session.get(src, stream=True)
+            for chunk in resource.iter_content(chunk_size=None):
+                if chunk:
+                    tmp_archive.write(chunk)
+                    hasher.update(chunk)
+        # Verify the signature of the hash
+        pubkey = load_pem_public_key(REPO_PUBKEY, default_backend())
+        pubkey.verify(bytes.fromhex(expected_hash), hasher.finalize(), ec.ECDSA(utils.Prehashed(sha512)))
+        # Extract the tar.xz file
+        tmp_archive.seek(0)
+        with tarfile.open(fileobj=tmp_archive) as tar:
+            tar.extractall(dst, numeric_owner=True)
+
 def load():
-    raise NotImplementedError()
+    with requests.Session() as session:
+        session.auth = REPO_AUTH
+        packages = session.get(REPO_PACKAGES_URL, timeout=5).content
+        packages_sig = bytes.fromhex(session.get(REPO_SIG_URL, timeout=5).text)
+    pubkey = load_pem_public_key(REPO_PUBKEY, default_backend())
+    pubkey.verify(packages_sig, packages, ec.ECDSA(hashes.SHA512()))
+    return json.loads(packages)
 
 def get_entries(entry_type):
     data = load()
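download_archive() hashes the stream incrementally and only then checks the publisher's signature against the finished digest, which is why the ECDSA verification uses Prehashed. A self-contained round-trip sketch with a throwaway P-384 key (the real key comes from the repo config):

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import ec, utils

    private_key = ec.generate_private_key(ec.SECP384R1(), default_backend())

    hasher = hashes.Hash(hashes.SHA512(), default_backend())
    for chunk in (b'first chunk', b'second chunk'):  # stands in for streamed download chunks
        hasher.update(chunk)
    digest = hasher.finalize()

    signature = private_key.sign(digest, ec.ECDSA(utils.Prehashed(hashes.SHA512())))
    # raises cryptography.exceptions.InvalidSignature if the data was tampered with
    private_key.public_key().verify(signature, digest, ec.ECDSA(utils.Prehashed(hashes.SHA512())))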
usr/lib/python3.8/spoc/repo_publish.py
@@ -2,31 +2,57 @@
 
 import json
 
-from . import utils
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric import ec, utils
+from cryptography.hazmat.primitives.serialization import load_pem_private_key
+
 from .exceptions import AppNotFoundError, ImageNotFoundError
 from .flock import lock_ex
-from .paths import PUB_PRIVATE_KEY, PUB_REPO_FILE, PUB_REPO_LOCK, PUB_SIG_FILE
+from .config import LOCK_FILE, PUB_PRIVKEY_FILE, PUB_PACKAGES_FILE, PUB_SIG_FILE
 
 TYPE_APP = 'apps'
 TYPE_IMAGE = 'images'
 
+class TarSizeCounter:
+    def __init__(self):
+        self.size = 0
+
+    def add_file(self, tarinfo):
+        self.size += tarinfo.size
+        return tarinfo
+
+def sign_file(file_path):
+    # Generate an ECDSA signature of the file's SHA512 hash using the EC private key
+    sha512 = hashes.SHA512()
+    hasher = hashes.Hash(sha512, default_backend())
+    with open(file_path, 'rb') as f:
+        while True:
+            data = f.read(64*1024)
+            if not data:
+                break
+            hasher.update(data)
+    with open(PUB_PRIVKEY_FILE, 'rb') as f:
+        private_key = load_pem_private_key(f.read(), None, default_backend())
+    return private_key.sign(hasher.finalize(), ec.ECDSA(utils.Prehashed(sha512)))
+
 def load():
     try:
-        with open(PUB_REPO_FILE) as f:
+        with open(PUB_PACKAGES_FILE) as f:
             return json.load(f)
     except FileNotFoundError:
         return {TYPE_IMAGE: {}, TYPE_APP: {}}
 
 def save(data):
-    with open(PUB_REPO_FILE, 'w') as f:
+    with open(PUB_PACKAGES_FILE, 'w') as f:
         json.dump(data, f, sort_keys=True, indent=4)
     # Cryptographically sign the repository file
-    signature = utils.sign_file(PUB_PRIVATE_KEY, PUB_REPO_FILE)
+    signature = sign_file(PUB_PACKAGES_FILE)
     with open(PUB_SIG_FILE, 'wb') as f:
         f.write(signature)
 
 def get_entries(entry_type):
-    with lock_ex(PUB_REPO_LOCK):
+    with lock_ex(LOCK_FILE):
         data = load()
     return data[entry_type]
 
@@ -34,13 +60,13 @@ def get_entry(entry_type, name):
     return get_entries(entry_type)[name]
 
 def add_entry(entry_type, name, definition):
-    with lock_ex(PUB_REPO_LOCK):
+    with lock_ex(LOCK_FILE):
         data = load()
         data[entry_type][name] = definition
         save(data)
 
 def delete_entry(entry_type, name):
-    with lock_ex(PUB_REPO_LOCK):
+    with lock_ex(LOCK_FILE):
         data = load()
         try:
             del data[entry_type][name]
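TarSizeCounter piggybacks on tarfile's filter callback: every TarInfo passes through add_file(), which tallies the uncompressed size before the entry is written. A usage sketch with invented paths, mirroring how Image.publish() uses it:

    import tarfile

    counter = TarSizeCounter()
    with tarfile.open('/tmp/example.tar.xz', 'w:xz') as tar:
        tar.add('/var/lib/spoc/layers/example', 'example', filter=counter.add_file)
    print(counter.size)  # total uncompressed bytes, stored as the image 'size'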
usr/lib/python3.8/spoc/utils.py
@@ -1,24 +1,5 @@
 # -*- coding: utf-8 -*-
 
-import hashlib
-import requests
-import tarfile
-import tempfile
-import zipfile
-
-from cryptography.hazmat.backends import default_backend
-from cryptography.hazmat.primitives import hashes
-from cryptography.hazmat.primitives.asymmetric import ec
-from cryptography.hazmat.primitives.serialization import load_pem_private_key
-
-class TarSizeCounter:
-    def __init__(self):
-        self.size = 0
-
-    def add_file(self, tarinfo):
-        self.size += tarinfo.size
-        return tarinfo
-
 SIZE_PREFIXES = ('', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
 
 def readable_size(bytes):
@@ -27,81 +8,3 @@ def readable_size(bytes):
         i += 1
         bytes /= 1024
     return f'{bytes:.2f} {SIZE_PREFIXES[i]}B'
-
-def sign_file(private_key_path, input_path):
-    # Generate SHA512 signature of a file using EC private key
-    with open(private_key_path, 'rb') as private_key:
-        priv_key = load_pem_private_key(private_key.read(), None, default_backend())
-    with open(input_path, 'rb') as input:
-        data = input.read()
-    return priv_key.sign(data, ec.ECDSA(hashes.SHA512()))
-
-def hash_file_fd(file):
-    # Calculate SHA512 hash of a file from file descriptor
-    sha512 = hashlib.sha512()
-    while True:
-        data = file.read(65536)
-        if not data:
-            break
-        sha512.update(data)
-    return sha512.hexdigest()
-
-def hash_file(file_path):
-    # Calculate SHA512 hash of a file
-    with open(file_path, 'rb') as file:
-        return hash_file_fd(file)
-
-def unpack_http_archive(src, dst, verify_hash=False):
-    # Decompress an archive downloaded via http(s) with optional hash verification
-    with tempfile.TemporaryFile() as tmp_archive:
-        # Download the file via http(s) and store as temporary file
-        with requests.Session() as session:
-            resource = session.get(src, stream=True)
-            for chunk in resource.iter_content(chunk_size=None):
-                if chunk:
-                    tmp_archive.write(chunk)
-        if verify_hash:
-            # If a hash has been given, verify it
-            tmp_archive.seek(0)
-            if verify_hash != hash_file_fd(tmp_archive):
-                raise # TODO
-        # Check the magic bytes and determine if the file is zip
-        tmp_archive.seek(0)
-        is_zip = zipfile.is_zipfile(tmp_archive)
-        # Extract the file. If it is not zip, assume tar (bzip2, gzip or xz)
-        tmp_archive.seek(0)
-        if is_zip:
-            with zipfile.ZipFile(tmp_archive) as zip:
-                zip.extractall(dst)
-        else:
-            with tarfile.open(fileobj=tmp_archive) as tar:
-                tar.extractall(dst, numeric_owner=True)
-
-def copy_tree(src, dst):
-    # Copies files from the host
-    if not os.path.isdir(src):
-        shutil.copy2(src, dst)
-    else:
-        os.makedirs(dst, exist_ok=True)
-        for name in os.listdir(src):
-            copy_tree(os.path.join(src, name), os.path.join(dst, name))
-        shutil.copystat(src, dst)
-
-def shift_uid(path, path_stat):
-    # Shifts UID/GID of a file or a directory and its contents to the unprivileged range
-    # The function parameters could arguably be more friendly, but os.scandir() already calls stat() on the entries,
-    # so it would be wasteful to not reuse them for considerable performance gain
-    uid = path_stat.st_uid
-    gid = path_stat.st_gid
-    do_chown = False
-    if uid < 100000:
-        uid = uid + 100000
-        do_chown = True
-    if gid < 100000:
-        gid = gid + 100000
-        do_chown = True
-    if do_chown:
-        os.chown(path, uid, gid, follow_symlinks=False)
-    if stat.S_ISDIR(path_stat.st_mode):
-        for entry in os.scandir(path):
-            shift_uid(entry.path, entry.stat(follow_symlinks=False))
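readable_size() keeps dividing by 1024 and advances through SIZE_PREFIXES; assuming the loop guard not shown in this hunk is bytes >= 1024, it behaves like this:

    print(readable_size(512))      # '512.00 B'
    print(readable_size(5242880))  # '5.00 MB'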