mirror of https://gitlab.com/futo-org/fcast.git

Final initial CI changes

Michael Hollister 2024-10-21 10:53:56 -05:00
parent 75786c98e3
commit 8ce444861c
5 changed files with 2469 additions and 3660 deletions

View file

@@ -79,7 +79,7 @@ module.exports = {
      config: {
        arch: 'x64',
        appUserModelId: `org.futo.${APPLICATION_NAME}`,
-       // signing TBD
+       // TODO: Windows signing
        icon: './assets/icons/icon.ico',
        name: APPLICATION_TITLE,
        programFilesFolderName: APPLICATION_TITLE,
@@ -143,7 +143,8 @@ module.exports = {
      case "linux": {
        let artifactName = `${APPLICATION_NAME}-linux-${e.arch}-${e.packageJSON.version}.zip`;
        if (fs.existsSync(`./out/make/zip/linux/${e.arch}/${artifactName}`)) {
-         // note regarding ubuntu issue
+         // TODO: Revisit Ubuntu 24.04 SUID sandbox issue
          // await extract(`./out/make/zip/linux/${e.arch}/${artifactName}`, { dir: `${process.cwd()}/out/make/zip/linux/${e.arch}/` });
          // fs.chownSync(`${process.cwd()}/out/make/zip/linux/${e.arch}/${APPLICATION_NAME}-linux-${e.arch}/chrome-sandbox`, 0, 0);
          // fs.chmodSync(`${process.cwd()}/out/make/zip/linux/${e.arch}/${APPLICATION_NAME}-linux-${e.arch}/chrome-sandbox`, 0o4755);
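Context for the TODO above: Ubuntu 24.04 ships an AppArmor policy that blocks unprivileged user namespaces, so the chrome-sandbox helper bundled in the zip build must be owned by root with the SUID bit set (mode 0o4755) before Electron will start. A minimal Python sketch of the same steps the commented-out lines perform (the paths and helper name are illustrative, not from the repository):

    import os
    import zipfile

    def fix_chrome_sandbox(zip_path, out_dir, app_dir):
        # Unpack the zip build, then restore the ownership and SUID bit
        # that zip archives do not preserve (mirrors extract/chown/chmod above).
        with zipfile.ZipFile(zip_path) as zf:
            zf.extractall(out_dir)
        sandbox = os.path.join(out_dir, app_dir, 'chrome-sandbox')
        os.chown(sandbox, 0, 0)    # root:root; requires running as root
        os.chmod(sandbox, 0o4755)  # SUID + rwxr-xr-x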

File diff suppressed because it is too large

View file

@@ -30,14 +30,15 @@
    "@types/workerpool": "^6.1.1",
    "@types/ws": "^8.5.10",
    "@types/yargs": "^17.0.33",
-   "electron": "^22.2.0",
+   "electron": "^32.2.1",
    "eslint": "^9.10.0",
    "extract-zip": "^2.0.1",
    "globals": "^15.9.0",
    "jest": "^29.7.0",
    "mdns-js": "github:mdns-js/node-mdns-js",
    "ts-jest": "^29.1.1",
    "ts-loader": "^9.4.2",
-   "typescript": "^4.9.5",
+   "typescript": "^5.5.4",
    "typescript-eslint": "^8.4.0",
    "webpack": "^5.75.0",
    "webpack-cli": "^5.0.1"

View file

@@ -3,13 +3,32 @@ import hashlib
import boto3
from botocore.client import Config
import shutil
+from functools import cmp_to_key

ACCOUNT_ID = os.environ.get('R2_ACCOUNT_ID')
ACCESS_KEY_ID = os.environ.get('R2_ACCESS_KEY_ID')
SECRET_ACCESS_KEY = os.environ.get('R2_SECRET_ACCESS_KEY')
BUCKET_NAME = os.environ.get('R2_BUCKET_NAME')
-LOCAL_CACHE_DIR = os.environ.get('FCAST_LOCAL_CACHE_DIR')
+CACHE_VERSION_AMOUNT = int(os.environ.get('FCAST_CACHE_VERSION_AMOUNT', default="-1"))
+
+DEPLOY_DIR = os.environ.get('FCAST_DO_RUNNER_DEPLOY_DIR')
+TEMP_DIR = os.path.join(DEPLOY_DIR, 'temp')
+LOCAL_CACHE_DIR = os.path.join(DEPLOY_DIR, 'cache')
+
+# Utility functions
+def compare_versions(x, y):
+    # Compare dotted version strings numerically, so that e.g. '1.10.0' sorts above '1.9.0'
+    x_parts = [int(p) for p in x.split('.')]
+    y_parts = [int(p) for p in y.split('.')]
+    for a, b in zip(x_parts, y_parts):
+        if a < b:
+            return -1
+        elif a > b:
+            return 1
+    return len(x_parts) - len(y_parts)
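This comparator feeds sorted(..., key=cmp_to_key(compare_versions), reverse=True) a few lines below; a quick illustrative check of the ordering it produces (the version values are made up for the example):

    from functools import cmp_to_key

    versions = ['1.2.0', '1.10.0', '1.9.3']
    newest_first = sorted(versions, key=cmp_to_key(compare_versions), reverse=True)
    assert newest_first == ['1.10.0', '1.9.3', '1.2.0']  # numeric, not lexicographic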
# Initial setup
# Note: the Cloudflare R2 docs are outdated; the secret is not supposed to be hashed...
@@ -26,21 +45,34 @@ s3 = boto3.client('s3',
        signature_version='s3v4'
    )
)

-list_response = s3.list_objects_v2(Bucket=BUCKET_NAME)
+list_response = s3.list_objects_v2(Bucket=BUCKET_NAME, Prefix='electron/')
bucket_files = list_response.get('Contents', [])
+bucket_versions_full = sorted(set(map(lambda x: x['Key'].split('/')[1], bucket_files)), key=cmp_to_key(compare_versions), reverse=True)
+bucket_versions = bucket_versions_full if CACHE_VERSION_AMOUNT < 0 else bucket_versions_full[:CACHE_VERSION_AMOUNT]
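Worth noting: list_objects_v2 returns at most 1,000 keys per call, so the listing above assumes everything under the 'electron/' prefix fits in one response. If the bucket outgrows that, boto3's standard paginator (same client object as above) collects every page; a minimal sketch:

    bucket_files = []
    paginator = s3.get_paginator('list_objects_v2')
    for page in paginator.paginate(Bucket=BUCKET_NAME, Prefix='electron/'):
        bucket_files.extend(page.get('Contents', []))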
# CI functions
def copy_artifacts_to_local_cache():
    print('Copying artifacts to cache...')
-    dst = os.path.join(LOCAL_CACHE_DIR, 'temp')
-    shutil.copytree('/artifacts', f'{dst}', dirs_exist_ok=True, ignore=shutil.ignore_patterns('*.w*'))
+    # All artifacts should have the same version, in the format: /artifacts/PKG/OS/ARCH/fcast-receiver-VERSION-OS-ARCH.PKG
+    version = os.listdir('/artifacts/zip/linux/x64')[0].split('-')[2]
+    dst = os.path.join(TEMP_DIR, version)
+    shutil.copytree('/artifacts', dst, dirs_exist_ok=True, ignore=shutil.ignore_patterns('*.w*'))
+    return version
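The version parse above trusts the first filename it finds under /artifacts/zip/linux/x64. A more defensive variant (purely illustrative; detect_artifact_version is a hypothetical helper, not in the commit) would collect every version present and fail loudly on a mismatch:

    def detect_artifact_version(artifacts_root='/artifacts'):
        # Walk all artifacts and require exactly one VERSION across them.
        versions = set()
        for _, _, files in os.walk(artifacts_root):
            for name in files:
                if name.startswith('fcast-receiver-'):
                    versions.add(name.split('-')[2])
        if len(versions) != 1:
            raise RuntimeError(f'Expected exactly one artifact version, found: {versions}')
        return versions.pop()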
+# TODO: Do a partial sync to prevent downloading the full bucket (only what is needed for delta updates), and purge old files
def sync_local_cache():
    print('Syncing local cache with s3...')
    local_files = []
    for root, _, files in os.walk(LOCAL_CACHE_DIR):
        for filename in files:
-            local_files.append(os.path.relpath(os.path.join(root, filename), LOCAL_CACHE_DIR))
+            rel_path = os.path.relpath(os.path.join(root, filename), LOCAL_CACHE_DIR)
+            version = os.path.relpath(rel_path, 'electron/').split('/')[0]
+            if version in bucket_versions:
+                local_files.append(rel_path)
+            else:
+                print(f'Purging file from local cache: {rel_path}')
+                os.remove(os.path.join(root, filename))

    for obj in bucket_files:
        filename = obj['Key']
@@ -72,7 +104,7 @@ def upload_local_cache():
        Key=file_path,
    )

-def generate_delta_updates():
+def generate_delta_updates(current_version):
    pass
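generate_delta_updates is still a stub, so the following is only one plausible shape for it: a sketch using the bsdiff4 package (a hypothetical dependency, not referenced by this commit) and illustrative cache paths, producing a binary patch from each retained older release to the current build:

    import bsdiff4  # hypothetical dependency for binary deltas

    def generate_delta_updates(current_version):
        # Sketch only: diff each cached older zip against the new build.
        for old_version in bucket_versions:
            if old_version == current_version:
                continue
            old_zip = os.path.join(LOCAL_CACHE_DIR, 'electron', old_version,
                                   f'fcast-receiver-{old_version}-linux-x64.zip')
            new_zip = os.path.join(TEMP_DIR, current_version, 'zip', 'linux', 'x64',
                                   f'fcast-receiver-{current_version}-linux-x64.zip')
            if os.path.exists(old_zip) and os.path.exists(new_zip):
                patch = os.path.join(TEMP_DIR, current_version,
                                     f'delta-{old_version}-to-{current_version}.patch')
                bsdiff4.file_diff(old_zip, new_zip, patch)  # writes the patch file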
# Generate HTML for browsing previous versions (based off of the bucket, plus local files if the bucket does not have everything)
@@ -83,9 +115,9 @@ def update_website():
    pass

# CI Operations
-copy_artifacts_to_local_cache()
+current_version = copy_artifacts_to_local_cache()
sync_local_cache()
-# generate_delta_updates()
+# generate_delta_updates(current_version)
upload_local_cache()
# generate_previous_releases_page()
# update_website()

View file

@@ -8,7 +8,6 @@
    "experimentalDecorators": true,
    "removeComments": false,
    "noImplicitAny": false,
-    "suppressImplicitAnyIndexErrors": true,
    "outDir": "dist"
  },
  "exclude": [ "node_modules", "test" ]