Use trailing slashes for URLs and paths, bunch of fixes

Disassembler 2020-02-14 19:58:19 +01:00
parent 74655ed359
commit 5abbd921cc
No known key found for this signature in database
GPG Key ID: 524BD33A0EE29499
6 changed files with 32 additions and 28 deletions


@@ -1,14 +1,14 @@
 [general]
-data-dir = /var/lib/spoc
-log-dir = /var/log/spoc
+data-dir = /var/lib/spoc/
+log-dir = /var/log/spoc/
 network-interface = spocbr0
 [publish]
-publish-dir = /srv/build/spoc
+publish-dir = /srv/build/spoc/
 signing-key = /etc/spoc/publish.key
 [repo]
-url = https://repo.spotter.cz/spoc
+url = https://repo.spotter.cz/spoc/
 username =
 password =
 public-key = MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEWJXH4Qm0kt2L86sntQH+C1zOJNQ0qMRt0vx4krTxRs9HQTQYAy//JC92ea2aKleA8OL0JF90b1NYXcQCWdAS+vE/ng9IEAii8C2+5nfuFeZ5YUjbQhfFblwHSM0c7hEG
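
The trailing slash on the [repo] url matters because the constants shown further below feed this value to urllib.parse.urljoin(), which treats a final path segment without a slash as a file name and replaces it. A minimal illustration of the difference, not part of the commit:

import urllib.parse

urllib.parse.urljoin('https://repo.spotter.cz/spoc', 'layers/')   # 'https://repo.spotter.cz/layers/'
urllib.parse.urljoin('https://repo.spotter.cz/spoc/', 'layers/')  # 'https://repo.spotter.cz/spoc/layers/'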


@@ -60,7 +60,7 @@ def build(filename, force, do_publish):
 def publish(image_name, force):
     # Check if publishing is needed and attempt to publish the image
     if force or image_name not in repo_publish.get_images():
-        image = Image(image_name, False)
+        image = Image(image_name)
         image.unpublish()
         print(f'Publishing image {image_name}')
         image.publish()


@@ -16,29 +16,29 @@ def get_repo_auth(config):
 NETWORK_INTERFACE = config.get('general', 'network-interface', fallback='spocbr0')
-DATA_DIR = config.get('general', 'data-dir', fallback='/var/lib/spoc')
-APPS_DIR = os.path.join(DATA_DIR, 'apps')
-CONTAINERS_DIR = os.path.join(DATA_DIR, 'containers')
-LAYERS_DIR = os.path.join(DATA_DIR, 'layers')
-VOLUME_DIR = os.path.join(DATA_DIR, 'volumes')
+DATA_DIR = config.get('general', 'data-dir', fallback='/var/lib/spoc/')
+APPS_DIR = os.path.join(DATA_DIR, 'apps/')
+CONTAINERS_DIR = os.path.join(DATA_DIR, 'containers/')
+LAYERS_DIR = os.path.join(DATA_DIR, 'layers/')
+VOLUME_DIR = os.path.join(DATA_DIR, 'volumes/')
 HOSTS_FILE = os.path.join(DATA_DIR, 'hosts')
 REPO_FILE = os.path.join(DATA_DIR, 'repository.json')
 LOG_DIR = config.get('general', 'log-dir', fallback='/var/log/spoc')
 LOCK_FILE = '/run/lock/spoc-local.lock'
 PUB_DIR = config.get('publish', 'publish-dir', fallback=os.path.join(DATA_DIR, 'publish'))
-PUB_LAYERS_DIR = os.path.join(PUB_DIR, 'layers')
-PUB_APPS_DIR = os.path.join(PUB_DIR, 'apps')
+PUB_LAYERS_DIR = os.path.join(PUB_DIR, 'layers/')
+PUB_APPS_DIR = os.path.join(PUB_DIR, 'apps/')
 PUB_REPO_FILE = os.path.join(PUB_DIR, 'repository.json')
 PUB_SIG_FILE = os.path.join(PUB_DIR, 'repository.sig')
 PUB_PRIVKEY_FILE = config.get('publish', 'signing-key', fallback='/etc/spoc/publish.key')
 PUB_LOCK_FILE = '/run/lock/spoc-publish.lock'
-ONLINE_BASE_URL = config.get('repo', 'url', fallback='https://localhost')
-ONLINE_LAYERS_URL = urllib.parse.urljoin(ONLINE_BASE_URL, 'layers')
-ONLINE_APPS_URL = urllib.parse.urljoin(ONLINE_BASE_URL, 'apps')
+ONLINE_BASE_URL = config.get('repo', 'url', fallback='https://localhost/')
+ONLINE_LAYERS_URL = urllib.parse.urljoin(ONLINE_BASE_URL, 'layers/')
+ONLINE_APPS_URL = urllib.parse.urljoin(ONLINE_BASE_URL, 'apps/')
 ONLINE_REPO_URL = urllib.parse.urljoin(ONLINE_BASE_URL, 'repository.json')
 ONLINE_SIG_URL = urllib.parse.urljoin(ONLINE_BASE_URL, 'repository.sig')
 ONLINE_REPO_FILE = os.path.join(DATA_DIR, 'online.json')
-ONLINE_AUTH = get_repo_auth(config)
+ONLINE_AUTH = get_repo_auth(config) # TODO: Username + password as part of url?
 ONLINE_PUBKEY = config.get('repo', 'public-key', fallback='')
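
Unlike the URL constants, the trailing slashes on the directory constants are not required by os.path.join(), which inserts missing separators on its own; presumably they just keep the paths uniform with the new config defaults. A quick check, not part of the commit:

import os

os.path.join('/var/lib/spoc', 'layers')     # '/var/lib/spoc/layers'
os.path.join('/var/lib/spoc/', 'layers/')   # '/var/lib/spoc/layers/'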


@@ -78,6 +78,6 @@ class Image:
     def download(self):
         definition = repo_online.get_image(self.name)
-        repo_online.download_archive(self.online_path, self.layer_path, definition['hash'])
+        repo_online.download_archive(self.online_path, LAYERS_DIR, definition['hash'])
         self.set_definition(definition)
         repo_local.register_image(self.name, definition)
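
download_archive() hands its dst argument straight to tar.extractall() (see the diff further below), so extracting into the shared LAYERS_DIR only reproduces the layer correctly if the archive members are already prefixed with the layer's own directory name; that layout is an assumption, not something this diff shows. A sketch under that assumption, with a hypothetical archive name:

import tarfile

with tarfile.open('example-layer.tar.xz') as tar:               # hypothetical file
    tar.extractall('/var/lib/spoc/layers/', numeric_owner=True)
    # members named 'example-layer/...' end up in /var/lib/spoc/layers/example-layer/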


@@ -2,9 +2,11 @@
 import hashlib
 import json
 import os
 import requests
 import tarfile
 import tempfile
 import time
 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives import hashes
@@ -23,13 +25,15 @@ def get_pubkey():
 PUBLIC_KEY = get_pubkey()
 # TODO: HTTP Error handling for all downloads (including imagebuilder)
 def download_archive(src, dst, expected_hash):
     # Download archive via http(s), verify hash and decompress
     with tempfile.TemporaryFile() as tmp_archive:
         sha512 = hashes.SHA512()
         hasher = hashes.Hash(sha512, default_backend())
         # Download the file via http(s) and store as temporary file
-        with requests.Session(auth=ONLINE_AUTH) as session:
+        with requests.Session() as session:
             resource = session.get(src, stream=True)
             for chunk in resource.iter_content(chunk_size=None):
                 if chunk:
@@ -43,16 +47,16 @@ def download_archive(src, dst, expected_hash):
             tar.extractall(dst, numeric_owner=True)
 def download_metadata():
-    with requests.Session(auth=ONLINE_AUTH) as session:
-        packages = session.get(ONLINE_REPO_URL, timout=5).content
-        packages_sig = bytes.fromhex(session.get(ONLINE_SIG_URL, timout=5).content)
+    with requests.Session() as session:
+        packages = session.get(ONLINE_REPO_URL, timeout=5).content
+        packages_sig = session.get(ONLINE_SIG_URL, timeout=5).content
     PUBLIC_KEY.verify(packages_sig, packages, ec.ECDSA(hashes.SHA512()))
     with open(ONLINE_REPO_FILE, 'wb') as f:
         f.write(packages)
 def load():
-    if not os.path.exist(ONLINE_REPO_FILE) or os.stat(ONLINE_REPO_FILE).st_mtime+300 < time.time():
-        # Cache the metadata file if local copy doesn't exist or is older than 5 minutes
+    if not os.path.exists(ONLINE_REPO_FILE) or os.stat(ONLINE_REPO_FILE).st_mtime+300 < time.time():
+        # Download and cache the metadata file if local cache doesn't exist or is older than 5 minutes
         download_metadata()
     with open(ONLINE_REPO_FILE) as f:
         return json.load(f)
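
Two of the fixes here deserve a note: requests.Session() takes no constructor arguments, so the removed auth=ONLINE_AUTH keyword would have raised a TypeError (presumably why the configuration constants above gained the TODO about putting the username and password into the URL), and dropping bytes.fromhex() suggests repository.sig is now read as raw signature bytes. A minimal sketch of the corrected download pattern with placeholder credentials and URL, not the project's actual helper:

import requests
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

def fetch(url, auth=None):
    # Stream the response while feeding a SHA-512 hasher, as download_archive() does
    hasher = hashes.Hash(hashes.SHA512(), default_backend())
    body = b''
    with requests.Session() as session:
        session.auth = auth                          # e.g. ('username', 'password')
        resource = session.get(url, stream=True, timeout=5)
        resource.raise_for_status()                  # the pending TODO: HTTP error handling
        for chunk in resource.iter_content(chunk_size=None):
            if chunk:
                hasher.update(chunk)
                body += chunk
    return body, hasher.finalize().hex()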


@@ -46,10 +46,10 @@ def load():
 def save(data):
     with open(PUB_REPO_FILE, 'w') as f:
         json.dump(data, f, sort_keys=True, indent=4)
-        # Cryptographically sign the repository file
-        signature = sign_file(PUB_REPO_FILE)
-        with open(PUB_SIG_FILE, 'wb') as f:
-            f.write(signature)
+    # Cryptographically sign the repository file
+    signature = sign_file(PUB_REPO_FILE)
+    with open(PUB_SIG_FILE, 'wb') as f:
+        f.write(signature)
 def get_entries(entry_type):
     with lock_ex(PUB_LOCK_FILE):
@@ -66,7 +66,7 @@ def add_entry(entry_type, name, definition):
         save(data)
 def delete_entry(entry_type, name):
-    with lock_ex(LOCK_FILE):
+    with lock_ex(PUB_LOCK_FILE):
         data = load()
         try:
             del data[entry_type][name]
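
The re-indentation in the first hunk apparently moves the signing step out of the with open(PUB_REPO_FILE, 'w') block, so repository.json is flushed and closed before sign_file() reads it back, and delete_entry() now takes the publish lock rather than the local one. sign_file() itself is not shown in this commit; a minimal sketch of what it presumably does, mirroring the ec.ECDSA(hashes.SHA512()) verification in the online-repository code above and assuming an unencrypted PEM key at PUB_PRIVKEY_FILE:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import ec

def sign_file(path):
    # Load the publisher's private key and sign the file contents (assumed PEM, no passphrase)
    with open(PUB_PRIVKEY_FILE, 'rb') as f:
        key = serialization.load_pem_private_key(f.read(), password=None, backend=default_backend())
    with open(path, 'rb') as f:
        return key.sign(f.read(), ec.ECDSA(hashes.SHA512()))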