Cache the online repo for 5 minutes
This commit is contained in:
parent
51c0703d71
commit
74655ed359
@ -24,17 +24,21 @@ VOLUME_DIR = os.path.join(DATA_DIR, 'volumes')
|
|||||||
HOSTS_FILE = os.path.join(DATA_DIR, 'hosts')
|
HOSTS_FILE = os.path.join(DATA_DIR, 'hosts')
|
||||||
REPO_FILE = os.path.join(DATA_DIR, 'repository.json')
|
REPO_FILE = os.path.join(DATA_DIR, 'repository.json')
|
||||||
LOG_DIR = config.get('general', 'log-dir', fallback='/var/log/spoc')
|
LOG_DIR = config.get('general', 'log-dir', fallback='/var/log/spoc')
|
||||||
LOCK_FILE = '/run/lock/spoc.lock'
|
LOCK_FILE = '/run/lock/spoc-local.lock'
|
||||||
|
|
||||||
PUB_DIR = config.get('publish', 'publish-dir', fallback=os.path.join(DATA_DIR, 'publish'))
|
PUB_DIR = config.get('publish', 'publish-dir', fallback=os.path.join(DATA_DIR, 'publish'))
|
||||||
PUB_LAYERS_DIR = os.path.join(PUB_DIR, 'layers')
|
PUB_LAYERS_DIR = os.path.join(PUB_DIR, 'layers')
|
||||||
PUB_APPS_DIR = os.path.join(PUB_DIR, 'apps')
|
PUB_APPS_DIR = os.path.join(PUB_DIR, 'apps')
|
||||||
PUB_PACKAGES_FILE = os.path.join(PUB_DIR, 'packages.json')
|
PUB_REPO_FILE = os.path.join(PUB_DIR, 'repository.json')
|
||||||
PUB_SIG_FILE = os.path.join(PUB_DIR, 'packages.sig')
|
PUB_SIG_FILE = os.path.join(PUB_DIR, 'repository.sig')
|
||||||
PUB_PRIVKEY_FILE = config.get('publish', 'signing-key', fallback='/etc/spoc/publish.key')
|
PUB_PRIVKEY_FILE = config.get('publish', 'signing-key', fallback='/etc/spoc/publish.key')
|
||||||
|
PUB_LOCK_FILE = '/run/lock/spoc-publish.lock'
|
||||||
|
|
||||||
REPO_URL = config.get('repo', 'url', fallback='https://localhost')
|
ONLINE_BASE_URL = config.get('repo', 'url', fallback='https://localhost')
|
||||||
REPO_PACKAGES_URL = urllib.parse.urljoin(REPO_URL, 'packages.json')
|
ONLINE_LAYERS_URL = urllib.parse.urljoin(ONLINE_BASE_URL, 'layers')
|
||||||
REPO_SIG_URL = urllib.parse.urljoin(REPO_URL, 'packages.sig')
|
ONLINE_APPS_URL = urllib.parse.urljoin(ONLINE_BASE_URL, 'apps')
|
||||||
REPO_AUTH = get_repo_auth(config)
|
ONLINE_REPO_URL = urllib.parse.urljoin(ONLINE_BASE_URL, 'repository.json')
|
||||||
REPO_PUBKEY = config.get('repo', 'public-key', fallback='')
|
ONLINE_SIG_URL = urllib.parse.urljoin(ONLINE_BASE_URL, 'repository.sig')
|
||||||
|
ONLINE_REPO_FILE = os.path.join(DATA_DIR, 'online.json')
|
||||||
|
ONLINE_AUTH = get_repo_auth(config)
|
||||||
|
ONLINE_PUBKEY = config.get('repo', 'public-key', fallback='')
|
||||||
|
@ -8,7 +8,7 @@ import urllib.parse
|
|||||||
from . import repo_local
|
from . import repo_local
|
||||||
from . import repo_online
|
from . import repo_online
|
||||||
from . import repo_publish
|
from . import repo_publish
|
||||||
from .config import LAYERS_DIR, PUB_LAYERS_DIR, REPO_URL
|
from .config import LAYERS_DIR, PUB_LAYERS_DIR, ONLINE_LAYERS_URL
|
||||||
|
|
||||||
DEFINITION_MEMBERS = {'layers', 'env', 'uid', 'gid', 'cmd', 'cwd', 'ready', 'halt', 'size', 'dlsize', 'hash'}
|
DEFINITION_MEMBERS = {'layers', 'env', 'uid', 'gid', 'cmd', 'cwd', 'ready', 'halt', 'size', 'dlsize', 'hash'}
|
||||||
|
|
||||||
@ -17,7 +17,7 @@ class Image:
|
|||||||
self.name = name
|
self.name = name
|
||||||
self.layer_path = os.path.join(LAYERS_DIR, name)
|
self.layer_path = os.path.join(LAYERS_DIR, name)
|
||||||
self.archive_path = os.path.join(PUB_LAYERS_DIR, f'{name}.tar.xz')
|
self.archive_path = os.path.join(PUB_LAYERS_DIR, f'{name}.tar.xz')
|
||||||
self.online_path = urllib.parse.urljoin(REPO_URL, 'images', f'{name}.tar.xz')
|
self.online_path = urllib.parse.urljoin(ONLINE_LAYERS_URL, f'{name}.tar.xz')
|
||||||
self.layers = [name]
|
self.layers = [name]
|
||||||
self.env = {}
|
self.env = {}
|
||||||
self.uid = None
|
self.uid = None
|
||||||
|
@ -12,13 +12,13 @@ from cryptography.hazmat.primitives.asymmetric import ec, utils
|
|||||||
from cryptography.hazmat.primitives.serialization import load_pem_public_key
|
from cryptography.hazmat.primitives.serialization import load_pem_public_key
|
||||||
|
|
||||||
from .exceptions import AppNotFoundError, ImageNotFoundError
|
from .exceptions import AppNotFoundError, ImageNotFoundError
|
||||||
from .config import REPO_AUTH, REPO_PUBKEY, REPO_PACKAGES_URL, REPO_SIG_URL
|
from .config import ONLINE_REPO_FILE, ONLINE_AUTH, ONLINE_PUBKEY, ONLINE_REPO_URL, ONLINE_SIG_URL
|
||||||
|
|
||||||
TYPE_APP = 'apps'
|
TYPE_APP = 'apps'
|
||||||
TYPE_IMAGE = 'images'
|
TYPE_IMAGE = 'images'
|
||||||
|
|
||||||
def get_pubkey():
|
def get_pubkey():
|
||||||
pubkey = f'-----BEGIN PUBLIC KEY-----\n{REPO_PUBKEY}\n-----END PUBLIC KEY-----'
|
pubkey = f'-----BEGIN PUBLIC KEY-----\n{ONLINE_PUBKEY}\n-----END PUBLIC KEY-----'
|
||||||
return load_pem_public_key(pubkey.encode(), default_backend())
|
return load_pem_public_key(pubkey.encode(), default_backend())
|
||||||
|
|
||||||
PUBLIC_KEY = get_pubkey()
|
PUBLIC_KEY = get_pubkey()
|
||||||
@ -29,7 +29,7 @@ def download_archive(src, dst, expected_hash):
|
|||||||
sha512 = hashes.SHA512()
|
sha512 = hashes.SHA512()
|
||||||
hasher = hashes.Hash(sha512, default_backend())
|
hasher = hashes.Hash(sha512, default_backend())
|
||||||
# Download the file via http(s) and store as temporary file
|
# Download the file via http(s) and store as temporary file
|
||||||
with requests.Session(auth=REPO_AUTH) as session:
|
with requests.Session(auth=ONLINE_AUTH) as session:
|
||||||
resource = session.get(src, stream=True)
|
resource = session.get(src, stream=True)
|
||||||
for chunk in resource.iter_content(chunk_size=None):
|
for chunk in resource.iter_content(chunk_size=None):
|
||||||
if chunk:
|
if chunk:
|
||||||
@ -42,12 +42,20 @@ def download_archive(src, dst, expected_hash):
|
|||||||
with tarfile.open(fileobj=tmp_archive) as tar:
|
with tarfile.open(fileobj=tmp_archive) as tar:
|
||||||
tar.extractall(dst, numeric_owner=True)
|
tar.extractall(dst, numeric_owner=True)
|
||||||
|
|
||||||
def load():
|
def download_metadata():
|
||||||
with requests.Session(auth=REPO_AUTH) as session:
|
with requests.Session(auth=ONLINE_AUTH) as session:
|
||||||
packages = session.get(REPO_PACKAGES_URL, timout=5).content
|
packages = session.get(ONLINE_REPO_URL, timeout=5).content
|
||||||
packages_sig = bytes.fromhex(session.get(REPO_SIG_URL, timout=5).content)
|
packages_sig = bytes.fromhex(session.get(ONLINE_SIG_URL, timeout=5).text)
|
||||||
PUBLIC_KEY.verify(packages_sig, packages, ec.ECDSA(hashes.SHA512()))
|
PUBLIC_KEY.verify(packages_sig, packages, ec.ECDSA(hashes.SHA512()))
|
||||||
return json.loads(packages)
|
with open(ONLINE_REPO_FILE, 'wb') as f:
|
||||||
|
f.write(packages)
|
||||||
|
|
||||||
|
def load():
|
||||||
|
if not os.path.exists(ONLINE_REPO_FILE) or os.stat(ONLINE_REPO_FILE).st_mtime+300 < time.time():
|
||||||
|
# Cache the metadata file if local copy doesn't exist or is older than 5 minutes
|
||||||
|
download_metadata()
|
||||||
|
with open(ONLINE_REPO_FILE) as f:
|
||||||
|
return json.load(f)
|
||||||
|
|
||||||
def get_entries(entry_type):
|
def get_entries(entry_type):
|
||||||
data = load()
|
data = load()
|
||||||
|
@ -9,7 +9,7 @@ from cryptography.hazmat.primitives.serialization import load_pem_private_key
|
|||||||
|
|
||||||
from .exceptions import AppNotFoundError, ImageNotFoundError
|
from .exceptions import AppNotFoundError, ImageNotFoundError
|
||||||
from .flock import lock_ex
|
from .flock import lock_ex
|
||||||
from .config import LOCK_FILE, PUB_PRIVKEY_FILE, PUB_PACKAGES_FILE, PUB_SIG_FILE
|
from .config import PUB_LOCK_FILE, PUB_PRIVKEY_FILE, PUB_REPO_FILE, PUB_SIG_FILE
|
||||||
|
|
||||||
TYPE_APP = 'apps'
|
TYPE_APP = 'apps'
|
||||||
TYPE_IMAGE = 'images'
|
TYPE_IMAGE = 'images'
|
||||||
@ -38,21 +38,21 @@ def sign_file(file_path):
|
|||||||
|
|
||||||
def load():
|
def load():
|
||||||
try:
|
try:
|
||||||
with open(PUB_PACKAGES_FILE) as f:
|
with open(PUB_REPO_FILE) as f:
|
||||||
return json.load(f)
|
return json.load(f)
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
return {TYPE_IMAGE: {}, TYPE_APP: {}}
|
return {TYPE_IMAGE: {}, TYPE_APP: {}}
|
||||||
|
|
||||||
def save(data):
|
def save(data):
|
||||||
with open(PUB_PACKAGES_FILE, 'w') as f:
|
with open(PUB_REPO_FILE, 'w') as f:
|
||||||
json.dump(data, f, sort_keys=True, indent=4)
|
json.dump(data, f, sort_keys=True, indent=4)
|
||||||
# Cryptographically sign the repository file
|
# Cryptographically sign the repository file
|
||||||
signature = sign_file(PUB_PACKAGES_FILE)
|
signature = sign_file(PUB_REPO_FILE)
|
||||||
with open(PUB_SIG_FILE, 'wb') as f:
|
with open(PUB_SIG_FILE, 'wb') as f:
|
||||||
f.write(signature)
|
f.write(signature)
|
||||||
|
|
||||||
def get_entries(entry_type):
|
def get_entries(entry_type):
|
||||||
with lock_ex(LOCK_FILE):
|
with lock_ex(PUB_LOCK_FILE):
|
||||||
data = load()
|
data = load()
|
||||||
return data[entry_type]
|
return data[entry_type]
|
||||||
|
|
||||||
@ -60,7 +60,7 @@ def get_entry(entry_type, name):
|
|||||||
return get_entries(entry_type)[name]
|
return get_entries(entry_type)[name]
|
||||||
|
|
||||||
def add_entry(entry_type, name, definition):
|
def add_entry(entry_type, name, definition):
|
||||||
with lock_ex(LOCK_FILE):
|
with lock_ex(PUB_LOCK_FILE):
|
||||||
data = load()
|
data = load()
|
||||||
data[entry_type][name] = definition
|
data[entry_type][name] = definition
|
||||||
save(data)
|
save(data)
|
||||||
|
Loading…
Reference in New Issue
Block a user