Merge crypto + some imagebuilder functions into utils

Disassembler 2020-02-12 16:03:32 +01:00
parent de6b5e81ac
commit 467cce9ac3
GPG Key ID: 524BD33A0EE29499
7 changed files with 99 additions and 86 deletions

View File

@@ -4,6 +4,7 @@
import argparse
from spoc import repo_local
from spoc import repo_online
from spoc import repo_publish
from spoc.image import Image
from spoc.imagebuilder import ImageBuilder
@@ -38,8 +39,7 @@ def download(image_name):
raise NotImplementedException() # TODO
def delete(image_name):
image = Image(image_name, False)
image.delete()
Image(image_name, False).delete()
def build(filename, force, do_publish):
# Check if a build is needed and attempt to build the image from image file
@@ -69,8 +69,7 @@ def publish(image_name, force):
print(f'Image {image_name} already published, skipping publish task')
def unpublish(image_name):
image = Image(image_name, False)
image.unpublish()
Image(image_name, False).unpublish()
parser = argparse.ArgumentParser(description='SPOC image manager')
parser.set_defaults(action=None)
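
A minimal sketch of how the subcommands above might be wired to this parser; the subparser and argument names are assumptions for illustration, not taken from this diff:

subparsers = parser.add_subparsers(dest='command')

parser_delete = subparsers.add_parser('delete')
parser_delete.add_argument('image_name')
parser_delete.set_defaults(action=delete)

parser_unpublish = subparsers.add_parser('unpublish')
parser_unpublish.add_argument('image_name')
parser_unpublish.set_defaults(action=unpublish)

args = parser.parse_args()
if args.action:
    args.action(args.image_name)
else:
    parser.print_help()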

View File

@@ -1,27 +0,0 @@
# -*- coding: utf-8 -*-
import hashlib
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.serialization import load_pem_private_key
def sign_file(private_key_path, input_path):
# Generate SHA512 signature of a file using EC private key
with open(private_key_path, 'rb') as f:
priv_key = load_pem_private_key(f.read(), None, default_backend())
with open(input_path, 'rb') as f:
data = f.read()
return priv_key.sign(data, ec.ECDSA(hashes.SHA512()))
def hash_file(file_path):
# Calculate SHA512 hash of a file
sha512 = hashlib.sha512()
with open(file_path, 'rb') as f:
while True:
data = f.read(65536)
if not data:
break
sha512.update(data)
return sha512.hexdigest()

View File

@@ -4,7 +4,6 @@ import os
import shutil
import tarfile
from . import crypto
from . import repo_local
from . import repo_publish
from . import utils
@@ -65,7 +64,7 @@ class Image:
tar.add(self.layer_path, self.name, filter=ctr.add_file)
self.size = ctr.size
self.dlsize = os.path.getsize(self.archive_path)
self.hash = crypto.hash_file(self.archive_path)
self.hash = utils.hash_file(self.archive_path)
repo_publish.register_image(self.name, self.get_definition())
def unpublish(self):
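
The ctr.add_file filter passed to tar.add() above comes from utils.TarSizeCounter, of which only the constructor is visible in this diff. A minimal sketch of the pattern it presumably implements, assuming add_file is a tarfile filter that sums member sizes while letting every member through:

import tarfile

class SizeCounter:
    def __init__(self):
        self.size = 0

    def add_file(self, tarinfo):
        # tarfile.add() calls this for every member; summing tarinfo.size
        # yields the uncompressed payload size of the archive
        self.size += tarinfo.size
        return tarinfo

ctr = SizeCounter()
with tarfile.open('/tmp/layer.tar.xz', 'w:xz') as tar:
    tar.add('rootfs', 'example', filter=ctr.add_file)  # 'rootfs' is a placeholder directory
print(ctr.size)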

View File

@@ -114,56 +114,9 @@ class ImageBuilder:
# Copy files from the host or download them from a http(s) URL
dst = os.path.join(self.image.layer_path, dst.lstrip('/'))
if src.startswith('http://') or src.startswith('https://'):
unpack_http_archive(src, dst)
utils.unpack_http_archive(src, dst)
else:
src = os.path.join(os.path.dirname(self.filename), src)
copy_tree(src, dst)
utils.copy_tree(src, dst)
# Shift UID/GID of the files to the unprivileged range
shift_uid(dst, os.stat(dst, follow_symlinks=False))
def unpack_http_archive(src, dst):
# Decompress an archive downloaded via http(s)
with tempfile.TemporaryFile() as tmp_archive:
with requests.Session() as session:
resource = session.get(src, stream=True)
for chunk in resource.iter_content(chunk_size=None):
if chunk:
tmp_archive.write(chunk)
tmp_archive.seek(0)
is_zip = zipfile.is_zipfile(tmp_archive)
tmp_archive.seek(0)
if is_zip:
with zipfile.ZipFile(tmp_archive) as zip:
zip.extractall(dst)
else:
with tarfile.open(fileobj=tmp_archive) as tar:
tar.extractall(dst, numeric_owner=True)
def copy_tree(src, dst):
# Copies files from the host
if not os.path.isdir(src):
shutil.copy2(src, dst)
else:
os.makedirs(dst, exist_ok=True)
for name in os.listdir(src):
copy_tree(os.path.join(src, name), os.path.join(dst, name))
shutil.copystat(src, dst)
def shift_uid(path, path_stat):
# Shifts UID/GID of a file or a directory and its contents to the unprivileged range
# The function parameters could arguably be more friendly, but os.scandir() already calls stat() on the entries,
# so it would be wasteful not to reuse them for a considerable performance gain
uid = path_stat.st_uid
gid = path_stat.st_gid
do_chown = False
if uid < 100000:
uid = uid + 100000
do_chown = True
if gid < 100000:
gid = gid + 100000
do_chown = True
if do_chown:
os.chown(path, uid, gid, follow_symlinks=False)
if stat.S_ISDIR(path_stat.st_mode):
for entry in os.scandir(path):
shift_uid(entry.path, entry.stat(follow_symlinks=False))
utils.shift_uid(dst, os.stat(dst, follow_symlinks=False))
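
A toy illustration of the UID/GID shift that utils.shift_uid performs on the copied files: IDs below 100000 are moved into the unprivileged range by adding 100000, while IDs already in that range stay untouched. (Standalone sketch for clarity, not repository code.)

def shifted(host_id):
    return host_id + 100000 if host_id < 100000 else host_id

assert shifted(0) == 100000       # root maps to the start of the unprivileged range
assert shifted(33) == 100033      # e.g. www-data
assert shifted(100042) == 100042  # already shifted, left unchanged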

View File

@@ -3,7 +3,7 @@
import json
import requests
from . import crypto
from . import utils
from .exceptions import AppNotFoundError, ImageNotFoundError
TYPE_APP = 'apps'

View File

@@ -2,7 +2,7 @@
import json
from . import crypto
from . import utils
from .exceptions import AppNotFoundError, ImageNotFoundError
from .flock import lock_ex
from .paths import PUB_PRIVATE_KEY, PUB_REPO_FILE, PUB_REPO_LOCK, PUB_SIG_FILE
@@ -21,7 +21,7 @@ def save(data):
with open(PUB_REPO_FILE, 'w') as f:
json.dump(data, f, sort_keys=True, indent=4)
# Cryptographically sign the repository file
signature = crypto.sign_file(PUB_PRIVATE_KEY, PUB_REPO_FILE)
signature = utils.sign_file(PUB_PRIVATE_KEY, PUB_REPO_FILE)
with open(PUB_SIG_FILE, 'wb') as f:
f.write(signature)
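
An illustrative counterpart to the signing above, showing how a consumer of the repository could check PUB_SIG_FILE against PUB_REPO_FILE with the matching EC public key. This is a sketch only; the verify_file helper and the separate public key path are assumptions, not part of this commit:

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.serialization import load_pem_public_key

def verify_file(public_key_path, input_path, signature_path):
    # Returns True if the detached signature matches the file contents
    with open(public_key_path, 'rb') as f:
        pub_key = load_pem_public_key(f.read(), default_backend())
    with open(input_path, 'rb') as f:
        data = f.read()
    with open(signature_path, 'rb') as f:
        signature = f.read()
    try:
        pub_key.verify(signature, data, ec.ECDSA(hashes.SHA512()))
        return True
    except InvalidSignature:
        return False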

View File

@@ -1,5 +1,16 @@
# -*- coding: utf-8 -*-
import hashlib
import requests
import tarfile
import tempfile
import zipfile
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.serialization import load_pem_private_key
class TarSizeCounter:
def __init__(self):
self.size = 0
@@ -16,3 +27,81 @@ def readable_size(bytes):
i += 1
bytes /= 1024
return f'{bytes:.2f} {SIZE_PREFIXES[i]}B'
def sign_file(private_key_path, input_path):
# Generate SHA512 signature of a file using EC private key
with open(private_key_path, 'rb') as private_key:
priv_key = load_pem_private_key(private_key.read(), None, default_backend())
with open(input_path, 'rb') as input:
data = input.read()
return priv_key.sign(data, ec.ECDSA(hashes.SHA512()))
def hash_file_fd(file):
# Calculate SHA512 hash of a file from file descriptor
sha512 = hashlib.sha512()
while True:
data = file.read(65536)
if not data:
break
sha512.update(data)
return sha512.hexdigest()
def hash_file(file_path):
# Calculate SHA512 hash of a file
with open(file_path, 'rb') as file:
return hash_file_fd(file)
def unpack_http_archive(src, dst, verify_hash=False):
# Decompress an archive downloaded via http(s) with optional hash verification
with tempfile.TemporaryFile() as tmp_archive:
# Download the file via http(s) and store as temporary file
with requests.Session() as session:
resource = session.get(src, stream=True)
for chunk in resource.iter_content(chunk_size=None):
if chunk:
tmp_archive.write(chunk)
if verify_hash:
# If a hash has been given, verify it
tmp_archive.seek(0)
if verify_hash != hash_file_fd(tmp_archive):
raise # TODO
# Check the magic bytes to determine whether the file is a zip
tmp_archive.seek(0)
is_zip = zipfile.is_zipfile(tmp_archive)
# Extract the file. If it is not zip, assume tar (bzip2, gzip or xz)
tmp_archive.seek(0)
if is_zip:
with zipfile.ZipFile(tmp_archive) as zip:
zip.extractall(dst)
else:
with tarfile.open(fileobj=tmp_archive) as tar:
tar.extractall(dst, numeric_owner=True)
def copy_tree(src, dst):
# Copies files from the host
if not os.path.isdir(src):
shutil.copy2(src, dst)
else:
os.makedirs(dst, exist_ok=True)
for name in os.listdir(src):
copy_tree(os.path.join(src, name), os.path.join(dst, name))
shutil.copystat(src, dst)
def shift_uid(path, path_stat):
# Shifts UID/GID of a file or a directory and its contents to the unprivileged range
# The function parameters could arguably be more friendly, but os.scandir() already calls stat() on the entries,
# so it would be wasteful not to reuse them for a considerable performance gain
uid = path_stat.st_uid
gid = path_stat.st_gid
do_chown = False
if uid < 100000:
uid = uid + 100000
do_chown = True
if gid < 100000:
gid = gid + 100000
do_chown = True
if do_chown:
os.chown(path, uid, gid, follow_symlinks=False)
if stat.S_ISDIR(path_stat.st_mode):
for entry in os.scandir(path):
shift_uid(entry.path, entry.stat(follow_symlinks=False))
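
A hypothetical usage of the consolidated helpers introduced by this commit; the URL, destination path and digest value below are placeholders, not taken from the repository:

from spoc import utils

expected = 'cf83e135...'  # placeholder; the full SHA512 hex digest of the archive goes here
utils.unpack_http_archive('https://example.org/rootfs.tar.xz',
                          '/var/lib/spoc/layers/example',
                          verify_hash=expected)
print(utils.hash_file('/var/lib/spoc/layers/example/etc/os-release'))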