Implement quick updates (#56)

Co-authored-by: nolim1t - f6287b82CC84bcbd <nolim1t@users.noreply.github.com>
Co-authored-by: Philipp Walter <philippwalter@pm.me>
Aaron Dewes 2022-07-16 19:28:39 +02:00 committed by GitHub
parent 1ab3c36a12
commit d0bc4688d5
13 changed files with 285 additions and 114 deletions

View File

@ -3,7 +3,7 @@
# SPDX-License-Identifier: GPL-3.0-or-later
# A collection of fully FLOSS app definitions and FLOSS apps for Citadel.
https://github.com/runcitadel/apps v3-beta
https://github.com/runcitadel/apps v4-beta
# Some apps are modified versions of Umbrel apps, and their app definitions aren't FLOSS yet.
# Include them anyway, but as a separate repo.

View File

@ -1,24 +0,0 @@
from lib.citadelutils import classToDict
from lib.composegenerator.shared.env import validateEnv
from lib.composegenerator.v3.types import App, generateApp
from lib.composegenerator.v3.generate import convertContainerPermissions
def createCleanConfigFromV3(app: dict, nodeRoot: str):
parsedApp: App = generateApp(app)
for container in range(len(parsedApp.containers)):
# TODO: Make this dynamic and not hardcoded
if parsedApp.containers[container].requires and "c-lightning" in parsedApp.containers[container].requires:
parsedApp.containers[container] = None
parsedApp = convertContainerPermissions(parsedApp)
parsedApp = validateEnv(parsedApp)
finalApp = classToDict(parsedApp)
try:
finalApp['permissions'] = finalApp['metadata']['dependencies']
except:
finalApp['permissions'] = []
finalApp['id'] = finalApp['metadata']['id']
del finalApp['metadata']
# Set version of the cache file format
finalApp['version'] = "1"
return finalApp

View File

@ -54,6 +54,59 @@ def getFreePort(networkingFile: str, appId: str):
return port
def assignIpV4(appId: str, containerName: str):
scriptDir = path.dirname(path.realpath(__file__))
nodeRoot = path.join(scriptDir, "..", "..", "..", "..")
networkingFile = path.join(nodeRoot, "apps", "networking.json")
envFile = path.join(nodeRoot, ".env")
cleanContainerName = containerName.strip()
# If the name still contains a newline, throw an error
if cleanContainerName.find("\n") != -1:
raise Exception("Newline in container name")
env_var = "APP_{}_{}_IP".format(
appId.upper().replace("-", "_"),
cleanContainerName.upper().replace("-", "_")
)
# The list of used IPs is stored as JSON in the networking file; read it to check
# whether an address is still free
usedIps = []
networkingData = {}
if path.isfile(networkingFile):
with open(networkingFile, 'r') as f:
networkingData = json.load(f)
if 'ip_addresses' in networkingData:
usedIps = list(networkingData['ip_addresses'].values())
else:
networkingData['ip_addresses'] = {}
# An IP of the form 10.21.21.x, with x a random number between 20 and 255, is assigned
# to the container. If the chosen IP is already in use, another one is generated until a
# free one is found, then it is recorded in the networking file.
# If all IPs between 10.21.21.20 and 10.21.21.255 (inclusive) are already in use,
# throw an error, because no more IPs can be used
if len(usedIps) == 235:
raise Exception("No more IPs can be used")
if "{}-{}".format(appId, cleanContainerName) in networkingData['ip_addresses']:
ip = networkingData['ip_addresses']["{}-{}".format(
appId, cleanContainerName)]
else:
while True:
ip = "10.21.21." + str(random.randint(20, 255))
if ip not in usedIps:
networkingData['ip_addresses']["{}-{}".format(
appId, cleanContainerName)] = ip
break
dotEnv = parse_dotenv(envFile)
if env_var in dotEnv and str(dotEnv[env_var]) == str(ip):
return
with open(envFile, 'a') as f:
f.write("{}={}\n".format(env_var, ip))
with open(networkingFile, 'w') as f:
json.dump(networkingData, f)
def assignIp(container: ContainerStage2, appId: str, networkingFile: str, envFile: str) -> ContainerStage2:
# Strip leading/trailing whitespace from container.name
container.name = container.name.strip()
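
The address-picking scheme described in the comments of assignIpV4 above boils down to a small rejection-sampling loop. A minimal standalone sketch, with a hypothetical pick_free_ip helper and example app/container names that are not part of this commit:

import random

def pick_free_ip(used_ips):
    # mirrors the guard in assignIpV4 above: stop once 235 addresses are in use
    if len(used_ips) >= 235:
        raise Exception("No more IPs can be used")
    while True:
        candidate = "10.21.21.{}".format(random.randint(20, 255))
        if candidate not in used_ips:
            return candidate

# For app id "example-app" and container "web", the variable written to .env
# would be APP_EXAMPLE_APP_WEB_IP=<picked address>.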

View File

@ -6,9 +6,11 @@ import stat
import sys
import tempfile
import threading
import random
from typing import List
from sys import argv
import os
import fcntl
import requests
import shutil
import json
@ -31,9 +33,31 @@ from lib.validate import findAndValidateApps
from lib.metadata import getAppRegistry
from lib.entropy import deriveEntropy
class FileLock:
"""Implements a file-based lock using flock(2).
The lock file is saved in directory dir with name lock_name.
dir is the current directory by default.
"""
def __init__(self, lock_name, dir="."):
self.lock_file = open(os.path.join(dir, lock_name), "w")
def acquire(self, blocking=True):
"""Acquire the lock.
If the lock is not already acquired, return None. If the lock is
acquired and blocking is True, block until the lock is released. If
the lock is acquired and blocking is False, raise an IOError.
"""
ops = fcntl.LOCK_EX
if not blocking:
ops |= fcntl.LOCK_NB
fcntl.flock(self.lock_file, ops)
def release(self):
"""Release the lock. Return None even if lock not currently acquired"""
fcntl.flock(self.lock_file, fcntl.LOCK_UN)
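
A usage sketch for the FileLock class above (the lock name matches the one used in handleAppV4 below; wrapping the critical section in try/finally is just a common pattern, the code in this commit calls release() directly):

lock = FileLock("citadeL_registry_lock", dir="/tmp")
lock.acquire()            # blocks until flock(2) grants the exclusive lock
try:
    pass                  # read and update apps/registry.json here
finally:
    lock.release()        # unlock even if the registry update fails
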
# Join a list of threads, waiting for all of them to finish
def joinThreads(threads: List[threading.Thread]):
for thread in threads:
thread.join()
@ -49,26 +73,58 @@ updateIgnore = os.path.join(appsDir, ".updateignore")
appDataDir = os.path.join(nodeRoot, "app-data")
userFile = os.path.join(nodeRoot, "db", "user.json")
legacyScript = os.path.join(nodeRoot, "scripts", "app")
with open(os.path.join(nodeRoot, "db", "dependencies.yml"), "r") as file:
dependencies = yaml.safe_load(file)
# Returns every argument after the second one in sys.argv, joined into a single space-separated string
def getArguments():
arguments = ""
for i in range(3, len(argv)):
arguments += argv[i] + " "
return arguments
def handleAppV4(app):
composeFile = os.path.join(appsDir, app, "docker-compose.yml")
os.chown(os.path.join(appsDir, app), 1000, 1000)
os.system("docker run --rm -v {}:/apps -u 1000:1000 {} /app-cli convert --app-name '{}' --port-map /apps/ports.json /apps/{}/app.yml /apps/{}/result.yml --services 'lnd'".format(appsDir, dependencies['app-cli'], app, app, app))
with open(os.path.join(appsDir, app, "result.yml"), "r") as resultFile:
resultYml = yaml.safe_load(resultFile)
with open(composeFile, "w") as dockerComposeFile:
yaml.dump(resultYml["spec"], dockerComposeFile)
torDaemons = ["torrc-apps", "torrc-apps-2", "torrc-apps-3"]
torFileToAppend = torDaemons[random.randint(0, len(torDaemons) - 1)]
with open(os.path.join(nodeRoot, "tor", torFileToAppend), 'a') as f:
f.write(resultYml["new_tor_entries"])
mainPort = resultYml["port"]
registryFile = os.path.join(nodeRoot, "apps", "registry.json")
registry: list = []
lock = FileLock("citadeL_registry_lock", dir="/tmp")
lock.acquire()
if os.path.isfile(registryFile):
with open(registryFile, 'r') as f:
registry = json.load(f)
else:
raise Exception("Registry file not found")
for registryApp in registry:
if registryApp['id'] == app:
registry[registry.index(registryApp)]['port'] = resultYml["port"]
break
with open(registryFile, 'w') as f:
json.dump(registry, f, indent=4, sort_keys=True)
lock.release()
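
handleAppV4 above delegates the conversion to the pinned app-cli image and only reads three keys back from result.yml. A hedged sketch of that contract, with key names taken from the code and placeholder values:

example_result = {
    "spec": {"services": {}},  # dumped verbatim as the app's docker-compose.yml
    "new_tor_entries": "",     # torrc lines appended to a randomly chosen torrc-apps file
    "port": 3000,              # main port, patched into apps/registry.json
}
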
def getAppYml(name):
with open(os.path.join(appsDir, "sourceMap.json"), "r") as f:
sourceMap = json.load(f)
if not name in sourceMap:
print("Warning: App {} is not in the source map".format(name))
print("Warning: App {} is not in the source map".format(name), file=sys.stderr)
sourceMap = {
name: {
"githubRepo": "runcitadel/core",
"branch": "v2"
"githubRepo": "runcitadel/apps",
"branch": "v4-stable"
}
}
url = 'https://raw.githubusercontent.com/{}/{}/apps/{}/app.yml'.format(sourceMap[name]["githubRepo"], sourceMap[name]["branch"], name)
@ -88,16 +144,31 @@ def update(verbose: bool = False):
json.dump(registry["ports"], f, sort_keys=True)
print("Wrote registry to registry.json")
os.system("docker pull {}".format(dependencies['app-cli']))
threads = list()
# Loop through the apps and generate valid compose files from them, then put these into the app dir
for app in apps:
try:
composeFile = os.path.join(appsDir, app, "docker-compose.yml")
appYml = os.path.join(appsDir, app, "app.yml")
with open(appYml, 'r') as f:
appDefinition = yaml.safe_load(f)
if 'citadel_version' in appDefinition:
thread = threading.Thread(target=handleAppV4, args=(app,))
thread.start()
threads.append(thread)
else:
appCompose = getApp(appDefinition, app)
with open(composeFile, "w") as f:
appCompose = getApp(appYml, app)
if appCompose:
f.write(yaml.dump(appCompose, sort_keys=False))
if verbose:
print("Wrote " + app + " to " + composeFile)
except Exception as err:
print("Failed to convert app {}".format(app))
print(err)
joinThreads(threads)
print("Generated configuration successfully")
@ -117,22 +188,29 @@ def getUserData():
userData = json.load(f)
return userData
def checkUpdateAvailable(name: str) -> bool:
def checkUpdateAvailable(name: str):
latestAppYml = yaml.safe_load(getAppYml(name))
with open(os.path.join(appsDir, name, "app.yml"), "r") as f:
originalAppYml = yaml.safe_load(f)
if not "metadata" in latestAppYml or not "version" in latestAppYml["metadata"] or not "metadata" in originalAppYml or not "version" in originalAppYml["metadata"]:
print("App {} is not valid".format(name))
print("App {} is not valid".format(name), file=sys.stderr)
return False
if semver.compare(latestAppYml["metadata"]["version"], originalAppYml["metadata"]["version"]) > 0:
return {
"updateFrom": originalAppYml["metadata"]["version"],
"updateTo": latestAppYml["metadata"]["version"]
}
else:
return False
return semver.compare(latestAppYml["metadata"]["version"], originalAppYml["metadata"]["version"]) > 0
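
With this change checkUpdateAvailable no longer returns a plain boolean when an update exists; it reports the version pair instead (version numbers below are only an example):

import semver

# semver.compare returns 1, 0 or -1; only a strictly newer version counts as an update.
assert semver.compare("1.1.0", "1.0.0") > 0
# checkUpdateAvailable("example-app") would then return
# {"updateFrom": "1.0.0", "updateTo": "1.1.0"}, which getAvailableUpdates()
# collects into a dict keyed by app id.
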
def getAvailableUpdates():
availableUpdates = []
availableUpdates = {}
apps = findAndValidateApps(appsDir)
for app in apps:
try:
if checkUpdateAvailable(app):
availableUpdates.append(app)
checkResult = checkUpdateAvailable(app)
if checkResult:
availableUpdates[app] = checkResult
except Exception:
print("Warning: Can't check app {} yet".format(app), file=sys.stderr)
return availableUpdates
@ -177,12 +255,7 @@ def stopInstalled():
joinThreads(threads)
# Loads an app.yml and converts it to a docker-compose.yml
def getApp(appFile: str, appId: str):
with open(appFile, 'r') as f:
app = yaml.safe_load(f)
def getApp(app, appId: str):
if not "metadata" in app:
raise Exception("Error: Could not find metadata in " + appFile)
app["metadata"]["id"] = appId
@ -191,6 +264,7 @@ def getApp(appFile: str, appId: str):
print("Warning: App {} uses version 2 of the app.yml format, which is scheduled for removal in Citadel 0.2.0".format(appId))
return createComposeConfigFromV2(app, nodeRoot)
elif 'version' in app and str(app['version']) == "3":
print("Warning: App {} uses version 3 of the app.yml format, which is scheduled for removal in Citadel 0.3.0".format(appId))
return createComposeConfigFromV3(app, nodeRoot)
else:
raise Exception("Error: Unsupported version of app.yml")

View File

@ -4,10 +4,10 @@
import os
import yaml
import traceback
from lib.composegenerator.next.stage1 import createCleanConfigFromV3
from lib.composegenerator.v2.networking import getMainContainer
from lib.composegenerator.shared.networking import getFreePort
from lib.composegenerator.shared.networking import assignIpV4
from lib.entropy import deriveEntropy
from typing import List
import json
@ -41,11 +41,15 @@ def getAppRegistry(apps, app_path):
app_metadata = []
for app in apps:
app_yml_path = os.path.join(app_path, app, 'app.yml')
app_cache_path = os.path.join(app_path, app, 'app.cache.json')
if os.path.isfile(app_yml_path):
try:
with open(app_yml_path, 'r') as f:
app_yml = yaml.safe_load(f.read())
version = False
if 'version' in app_yml:
version = int(app_yml['version'])
elif 'citadel_version' in app_yml:
version = int(app_yml['citadel_version'])
metadata: dict = app_yml['metadata']
metadata['id'] = app
metadata['path'] = metadata.get('path', '')
@ -55,14 +59,14 @@ def getAppRegistry(apps, app_path):
if "mainContainer" in metadata:
metadata.pop("mainContainer")
app_metadata.append(metadata)
if(app_yml["version"] != 3):
if version < 3:
getPortsOldApp(app_yml, app)
else:
elif version == 3:
getPortsV3App(app_yml, app)
with open(app_cache_path, 'w') as f:
json.dump(createCleanConfigFromV3(app_yml, os.path.dirname(app_path)), f)
elif version == 4:
getPortsV4App(app_yml, app)
except Exception as e:
print(e)
print(traceback.format_exc())
print("App {} is invalid!".format(app))
appPortsToMap()
return {
@ -97,12 +101,12 @@ def getNewPort(usedPorts):
lastPort2 = lastPort2 + 1
return lastPort2
def validatePort(appContainer, port, appId, priority: int, isDynamic = False):
def validatePort(containerName, appContainer, port, appId, priority: int, isDynamic = False):
if port not in appPorts and port not in citadelPorts and port != 0:
appPorts[port] = {
"app": appId,
"port": port,
"container": appContainer["name"],
"container": containerName,
"priority": priority,
"dynamic": isDynamic,
}
@ -115,7 +119,7 @@ def validatePort(appContainer, port, appId, priority: int, isDynamic = False):
appPorts[port] = {
"app": appId,
"port": port,
"container": appContainer["name"],
"container": containerName,
"priority": priority,
"dynamic": isDynamic,
}
@ -128,7 +132,7 @@ def validatePort(appContainer, port, appId, priority: int, isDynamic = False):
appPorts[newPort] = {
"app": appId,
"port": port,
"container": appContainer["name"],
"container": containerName,
"priority": priority,
"dynamic": isDynamic,
}
@ -136,28 +140,44 @@ def validatePort(appContainer, port, appId, priority: int, isDynamic = False):
def getPortsOldApp(app, appId):
for appContainer in app["containers"]:
if "port" in appContainer:
validatePort(appContainer, appContainer["port"], appId, 0)
validatePort(appContainer["name"], appContainer, appContainer["port"], appId, 0)
if "ports" in appContainer:
for port in appContainer["ports"]:
realPort = int(str(port).split(":")[0])
validatePort(appContainer, realPort, appId, 2)
validatePort(appContainer["name"], appContainer, realPort, appId, 2)
def getPortsV3App(app, appId):
for appContainer in app["containers"]:
if "port" in appContainer:
if "preferredOutsidePort" in appContainer and "requiresPort" in appContainer and appContainer["requiresPort"]:
validatePort(appContainer, appContainer["preferredOutsidePort"], appId, 2)
validatePort(appContainer["name"], appContainer, appContainer["preferredOutsidePort"], appId, 2)
elif "preferredOutsidePort" in appContainer:
validatePort(appContainer, appContainer["preferredOutsidePort"], appId, 1)
validatePort(appContainer["name"], appContainer, appContainer["preferredOutsidePort"], appId, 1)
else:
validatePort(appContainer, appContainer["port"], appId, 0)
validatePort(appContainer["name"], appContainer, appContainer["port"], appId, 0)
elif "requiredPorts" not in appContainer and "requiredUdpPorts" not in appContainer:
validatePort(appContainer, getNewPort(appPorts.keys()), appId, 0, True)
validatePort(appContainer["name"], appContainer, getNewPort(appPorts.keys()), appId, 0, True)
if "requiredPorts" in appContainer:
for port in appContainer["requiredPorts"]:
validatePort(appContainer, port, appId, 2)
validatePort(appContainer["name"], appContainer, port, appId, 2)
if "requiredUdpPorts" in appContainer:
for port in appContainer["requiredUdpPorts"]:
validatePort(appContainer, port, appId, 2)
validatePort(appContainer["name"], appContainer, port, appId, 2)
def getPortsV4App(app, appId):
for appContainerName in app["services"].keys():
appContainer = app["services"][appContainerName]
if "enable_networking" in appContainer and not appContainer["enable_networking"]:
return
assignIpV4(appId, appContainerName)
if "port" in appContainer:
validatePort(appContainerName, appContainer, appContainer["port"], appId, 0)
if "required_ports" in appContainer:
if "tcp" in appContainer["required_ports"]:
for port in appContainer["required_ports"]["tcp"].keys():
validatePort(appContainerName, appContainer, port, appId, 2)
if "udp" in appContainer["required_ports"]:
for port in appContainer["required_ports"]["udp"].keys():
validatePort(appContainerName, appContainer, port, appId, 2)
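
getPortsV4App above walks a v4 app definition by service name rather than by a containers list. A minimal illustration of the structure it expects, with a made-up service name and placeholder port values:

example_v4_app = {
    "services": {
        "web": {
            "enable_networking": True,  # False makes getPortsV4App return early for the whole app
            "port": 3000,               # main port, registered with priority 0
            "required_ports": {
                "tcp": {8080: 8080},    # keys are the outside ports reserved with priority 2; values are placeholders
                "udp": {51820: 51820},
            },
        }
    }
}
# getPortsV4App(example_v4_app, "example-app") registers these ports and calls
# assignIpV4("example-app", "web").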

View File

@ -6,6 +6,7 @@ import os
import yaml
from jsonschema import validate
import yaml
import traceback
scriptDir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
@ -23,7 +24,7 @@ def validateApp(app: dict):
return True
# Catch and log any errors, and return false
except Exception as e:
print(e)
print(traceback.format_exc())
return False
elif 'version' in app and str(app['version']) == "3":
try:
@ -31,12 +32,13 @@ def validateApp(app: dict):
return True
# Catch and log any errors, and return false
except Exception as e:
print(e)
print(traceback.format_exc())
return False
else:
elif 'version' not in app and 'citadel_version' not in app:
print("Unsupported app version")
return False
else:
return True
# Read in an app.yml file and pass it to the validation function
# Returns true if valid, false otherwise
@ -62,14 +64,17 @@ def findApps(dir: str):
def findAndValidateApps(dir: str):
apps = []
app_data = {}
for root, dirs, files in os.walk(dir, topdown=False):
for name in dirs:
app_dir = os.path.join(root, name)
for subdir in os.scandir(dir):
if not subdir.is_dir():
continue
app_dir = subdir.path
if os.path.isfile(os.path.join(app_dir, "app.yml")):
apps.append(name)
apps.append(subdir.name)
# Read the app.yml and append it to app_data
with open(os.path.join(app_dir, "app.yml"), 'r') as f:
app_data[name] = yaml.safe_load(f)
app_data[subdir.name] = yaml.safe_load(f)
else:
print("App {} has no app.yml".format(subdir.name))
# Now validate all the apps using the validateAppFile function by passing the app.yml as an argument; if an app is invalid, remove it from the list
for app in apps:
appyml = app_data[app]
@ -103,6 +108,7 @@ def findAndValidateApps(dir: str):
should_continue=False
if not should_continue:
continue
if 'containers' in appyml:
for container in appyml['containers']:
if 'permissions' in container:
for permission in container['permissions']:

db/dependencies.yml Normal file
View File

@ -0,0 +1,5 @@
compose: v2.6.0
dashboard: ghcr.io/runcitadel/dashboard:main@sha256:25b6fb413c10f47e186309c8737926c241c0f2bec923b2c08dd837b828f14dbd
manager: ghcr.io/runcitadel/manager:main@sha256:db5775e986d53e762e43331540bb1c05a27b362da94d587c4a4591c981c00ee4
middleware: ghcr.io/runcitadel/middleware:main@sha256:2fbbfb2e818bf0462f74a6aaab192881615ae018e6dcb62a50d05f82ec622cb0
app-cli: ghcr.io/runcitadel/app-cli:main@sha256:f532923eac28cfac03579cbb440397bcf16c8730f291b39eeada8278331f7054

View File

@ -100,7 +100,7 @@ services:
ipv4_address: $LND_IP
dashboard:
container_name: dashboard
image: ghcr.io/runcitadel/dashboard:v0.0.15@sha256:a2cf5ad79367fb083db0f61e5a296aafee655c99af0c228680644c248ec674a5
image: ghcr.io/runcitadel/dashboard:main@sha256:25b6fb413c10f47e186309c8737926c241c0f2bec923b2c08dd837b828f14dbd
restart: on-failure
stop_grace_period: 1m30s
networks:
@ -108,7 +108,7 @@ services:
ipv4_address: $DASHBOARD_IP
manager:
container_name: manager
image: ghcr.io/runcitadel/manager:v0.0.15@sha256:9fb5a86d9e40a04f93d5b6110d43a0f9a5c4ad6311a843b5442290013196a5ce
image: ghcr.io/runcitadel/manager:main@sha256:db5775e986d53e762e43331540bb1c05a27b362da94d587c4a4591c981c00ee4
depends_on:
- tor
- redis
@ -162,7 +162,7 @@ services:
ipv4_address: $MANAGER_IP
middleware:
container_name: middleware
image: ghcr.io/runcitadel/middleware:v0.0.11@sha256:e472da8cbfa67d9a9dbf321334fe65cdf20a0f9b6d6bab33fdf07210f54e7002
image: ghcr.io/runcitadel/middleware:main@sha256:2fbbfb2e818bf0462f74a6aaab192881615ae018e6dcb62a50d05f82ec622cb0
depends_on:
- manager
- bitcoin
@ -223,6 +223,7 @@ services:
ipv4_address: $ELECTRUM_IP
redis:
container_name: redis
user: 1000:1000
image: redis:7.0.0-bullseye@sha256:ad0705f2e2344c4b642449e658ef4669753d6eb70228d46267685045bf932303
working_dir: /data
volumes:
@ -235,12 +236,6 @@ services:
default:
ipv4_address: $REDIS_IP
app-cli:
container_name: app-cli
image: ghcr.io/runcitadel/app-cli:main@sha256:694e52fa9da1ac976165f269c17e27803032a05a76293dfe3589a50813306ded
volumes:
- ${PWD}/apps:/apps
networks:
default:
name: citadel_main_network

events/triggers/quick-update Executable file
View File

@ -0,0 +1,25 @@
#!/usr/bin/env bash
# SPDX-FileCopyrightText: 2021-2022 Citadel and contributors
#
# SPDX-License-Identifier: GPL-3.0-or-later
CITADEL_ROOT="$(readlink -f $(dirname "${BASH_SOURCE[0]}")/../..)"
RELEASE=$(cat "$CITADEL_ROOT"/statuses/update-status.json | jq .updateTo -r)
cat <<EOF > "$CITADEL_ROOT"/statuses/update-status.json
{"state": "installing", "progress": 30, "description": "Starting update", "updateTo": "$RELEASE"}
EOF
curl "https://raw.githubusercontent.com/runcitadel/core/${RELEASE}/db/dependencies.yml" > "$CITADEL_ROOT"/db/dependencies
cat <<EOF > "$CITADEL_ROOT"/statuses/update-status.json
{"state": "installing", "progress": 70, "description": "Starting new containers", "updateTo": "$RELEASE"}
EOF
"${CITADEL_ROOT}/scripts/start"
cat <<EOF > "$CITADEL_ROOT"/statuses/update-status.json
{"state": "success", "progress": 100, "description": "Successfully installed Citadel $RELEASE", "updateTo": ""}
EOF
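
The trigger above drives a quick update entirely through statuses/update-status.json. A sketch of how another component might read the progress it reports (field names come from the script; the reader itself and the path are illustrative):

import json

with open("/path/to/citadel/statuses/update-status.json") as f:  # example path
    status = json.load(f)

# During a quick update the file moves through roughly these states:
#   {"state": "installing", "progress": 30,  "description": "Starting update", "updateTo": "<release>"}
#   {"state": "installing", "progress": 70,  "description": "Starting new containers", "updateTo": "<release>"}
#   {"state": "success",    "progress": 100, "description": "Successfully installed Citadel <release>", "updateTo": ""}
print(status["state"], status["progress"], status["description"])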

View File

@ -7,3 +7,4 @@
CITADEL_ROOT="$(readlink -f $(dirname "${BASH_SOURCE[0]}")/../..)"
"${CITADEL_ROOT}/scripts/set-update-channel" "${1}"
"${CITADEL_ROOT}/scripts/start"

View File

@ -2,5 +2,6 @@
"version": "0.0.6",
"name": "Citadel 0.0.6",
"requires": ">=0.0.1",
"isQuickUpdate": false,
"notes": "This update fixes a security issue in Tor which could lead to slower Tor performance or your node being inaccessible via Tor."
}

scripts/configure vendored
View File

@ -31,13 +31,14 @@ if not is_arm64 and not is_amd64:
print('Citadel only works on arm64 and amd64!')
exit(1)
dependencies = False
# Check the output of "docker compose version"; return True if it does not match the
# given target version, otherwise return False
def is_compose_rc_or_outdated():
def is_compose_version_except(target_version):
try:
output = subprocess.check_output(['docker', 'compose', 'version'])
if output.decode('utf-8').strip() != 'Docker Compose version v2.3.3':
print("Using outdated Docker Compose, updating...")
if output.decode('utf-8').strip() != 'Docker Compose version {}'.format(target_version):
return True
else:
return False
@ -48,17 +49,19 @@ def is_compose_rc_or_outdated():
def download_docker_compose():
# Skip if os.path.expanduser('~/.docker/cli-plugins/docker-compose') exists
subprocess.check_call(["mkdir", "-p", os.path.expanduser('~/.docker/cli-plugins/')])
if (os.path.exists(os.path.expanduser('~/.docker/cli-plugins/docker-compose')) or os.path.exists('/usr/lib/docker/cli-plugins/docker-compose')) and not is_compose_rc_or_outdated():
print("Found {}\n".format(subprocess.check_output(['docker', 'compose', 'version']).decode('utf-8').strip()))
return
print("Installing Docker Compose...\n")
if is_arm64:
subprocess.check_call(['wget', 'https://github.com/docker/compose/releases/download/v2.3.3/docker-compose-linux-aarch64', '-O', os.path.expanduser('~/.docker/cli-plugins/docker-compose')])
compose_arch = 'aarch64'
elif is_amd64:
subprocess.check_call(['wget', 'https://github.com/docker/compose/releases/download/v2.3.3/docker-compose-linux-x86_64', '-O', os.path.expanduser('~/.docker/cli-plugins/docker-compose')])
os.chmod(os.path.expanduser('~/.docker/cli-plugins/docker-compose'), 0o755)
compose_arch = 'x86_64'
# Earlier checks guarantee that no architecture other than the two handled above can occur
if is_compose_version_except(dependencies['compose']):
print("Docker compose not found or not required version, updating.")
compose_url = 'https://github.com/docker/compose/releases/download/{}/docker-compose-linux-{}'.format(dependencies['compose'], compose_arch)
compose_file = os.path.expanduser('~/.docker/cli-plugins/docker-compose')
subprocess.check_call(['wget', compose_url, '-O', compose_file])
os.chmod(compose_file, 0o755)
if not shutil.which("wget"):
print('Wget is not installed!')
@ -72,6 +75,9 @@ if not shutil.which("docker"):
CITADEL_ROOT=os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
os.chdir(CITADEL_ROOT)
with open("./db/dependencies.yml", "r") as file:
dependencies = yaml.safe_load(file)
updating = False
status_dir = os.path.join(CITADEL_ROOT, 'statuses')
# Make sure to use the main status dir for updates
@ -361,6 +367,15 @@ print("Generated configuration files\n")
print("Checking if Docker Compose is installed...")
download_docker_compose()
print("Updating core services...")
print()
with open("docker-compose.yml", 'r') as stream:
compose = yaml.safe_load(stream)
for service in ["manager", "middleware", "dashboard"]:
compose["services"][service]["image"] = dependencies[service]
with open("docker-compose.yml", "w") as stream:
yaml.dump(compose, stream, sort_keys=False)
if not reconfiguring:
print("Updating apps...\n")
os.system('./scripts/app --invoked-by-configure update')
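
Together with db/dependencies.yml above, these configure changes make the pinned versions drive both the Compose plugin download and the core service images. A small sketch of that lookup, assuming the working directory is the Citadel root as configure sets it (the x86_64 architecture and the manager example are illustrative):

import yaml

with open("db/dependencies.yml") as f:
    dependencies = yaml.safe_load(f)

# e.g. dependencies["compose"] == "v2.6.0"
compose_url = "https://github.com/docker/compose/releases/download/{}/docker-compose-linux-{}".format(
    dependencies["compose"], "x86_64")
# Core services in docker-compose.yml are then pinned to the same digests, e.g.:
#   compose["services"]["manager"]["image"] = dependencies["manager"]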

View File

@ -10,7 +10,7 @@ NODE_ROOT="$(readlink -f $(dirname "${BASH_SOURCE[0]}")/..)"
# If $1 is not given, fail
if [ -z "$1" ]; then
echo "Usage: $0 <channel>"
echo "Channel can currently either be 'stable' or 'beta'"
echo "Channel can currently either be 'stable', 'beta' or 'c-lightning'"
exit 1
fi
sed -i "s/UPDATE_CHANNEL=.*/UPDATE_CHANNEL=${1}/" "${NODE_ROOT}/.env"