Mirror of https://gitlab.com/futo-org/fcast.git, synced 2025-06-24 21:25:23 +00:00
Gitlab CI Testing
This commit is contained in:
parent 60684e33bc
commit a6cd28a36d
2 changed files with 89 additions and 1 deletion
@@ -72,5 +72,5 @@ deploy:
     - cd receivers/electron
     - apt update && apt install -y python3-boto3
   script:
-    - python scripts/deploy.py
+    - python3 scripts/deploy.py
   when: manual
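The deploy step switches from `python` to `python3` so it matches the interpreter that the apt-installed `python3-boto3` package targets; on Debian-based CI images a bare `python` may be missing or point at a different interpreter. A minimal, hypothetical sanity check (not part of the commit) that the job's interpreter can see the apt-installed boto3:

# Hypothetical check, assuming the Debian-based image used by the deploy job:
# `apt install -y python3-boto3` provides boto3 for the python3 interpreter only,
# so the deploy script must be launched with `python3`.
import sys
import boto3  # importable only under the interpreter that python3-boto3 was installed for

print(sys.version_info)   # expect a 3.x interpreter inside the CI job
print(boto3.__version__)  # confirms the apt-packaged boto3 resolves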
receivers/electron/scripts/deploy.py (new file, 88 lines)
@@ -0,0 +1,88 @@
+import os
+import hashlib
+import boto3
+from botocore.client import Config
+import shutil
+
+ACCOUNT_ID = os.environ.get('R2_ACCOUNT_ID')
+ACCESS_KEY_ID = os.environ.get('R2_ACCESS_KEY_ID')
+SECRET_ACCESS_KEY = os.environ.get('R2_SECRET_ACCESS_KEY')
+BUCKET_NAME = os.environ.get('R2_BUCKET_NAME')
+
+LOCAL_CACHE_DIR = os.environ.get('FCAST_LOCAL_CACHE_DIR')
+
+# Note: Cloudflare R2 docs outdated, secret is not supposed to be hashed...
+
+# Hash the secret access key using SHA-256
+#hashed_secret_key = hashlib.sha256(SECRET_ACCESS_KEY.encode()).hexdigest()
+
+# Configure the S3 client for Cloudflare R2
+s3 = boto3.client('s3',
+    endpoint_url=f'https://{ACCOUNT_ID}.r2.cloudflarestorage.com',
+    aws_access_key_id=ACCESS_KEY_ID,
+    # aws_secret_access_key=hashed_secret_key,
+    aws_secret_access_key=SECRET_ACCESS_KEY,
+    config=Config(
+        signature_version='s3v4'
+    )
+)
+list_response = s3.list_objects_v2(Bucket=BUCKET_NAME)
+bucket_files = list_response.get('Contents', [])
+
+def copy_artifacts_to_local_cache():
+    dst = os.path.join(LOCAL_CACHE_DIR, 'temp')
+    shutil.copytree('/artifacts', f'{dst}', dirs_exist_ok=True)
+
+# TODO: do partial sync to prevent downloading the full bucket (only what is needed for delta updates) and purge old files
+def sync_local_cache():
+    local_files = []
+    for _, _, files in os.walk(LOCAL_CACHE_DIR):
+        for filename in files:
+            local_files.append(filename)
+
+    for obj in bucket_files:
+        filename = obj['Key']
+        save_path = os.path.join(LOCAL_CACHE_DIR, filename)
+
+        if os.path.basename(filename) not in local_files:
+            print(f'Downloading file: {filename}')
+            get_response = s3.get_object(Bucket=BUCKET_NAME, Key=filename)
+
+            os.makedirs(os.path.dirname(save_path), exist_ok=True)
+            with open(save_path, 'wb') as file:
+                file.write(get_response['Body'].read())
+
+def upload_local_cache():
+    local_files = []
+    for root, _, files in os.walk(LOCAL_CACHE_DIR):
+        for filename in files:
+            local_files.append(os.path.relpath(os.path.join(root, filename), LOCAL_CACHE_DIR))
+
+    for file_path in local_files:
+        if os.path.basename(file_path) not in map(lambda x: os.path.basename(x['Key']), bucket_files):
+            print(f'Uploading file: {file_path}')
+
+            with open(os.path.join(LOCAL_CACHE_DIR, file_path), 'rb') as file:
+                put_response = s3.put_object(
+                    Body=file,
+                    Bucket=BUCKET_NAME,
+                    Key=file_path,
+                )
+
+def generate_delta_updates():
+    pass
+
+# generate html for previous version browsing (based off of the bucket, plus local files if the bucket does not have all of them)
+def generate_previous_releases_page():
+    pass
+
+def update_website():
+    pass
+
+# CI Operations
+copy_artifacts_to_local_cache()
+# sync_local_cache()
+# generate_delta_updates()
+# upload_local_cache()
+# generate_previous_releases_page()
+# update_website()
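On the TODO about partial sync in sync_local_cache: one hedged sketch of how a prefix-scoped listing could look, reusing the `s3` client and `BUCKET_NAME` from deploy.py above. The `list_bucket_keys` helper and the `prefix` value are illustrative assumptions, not part of this commit; paginating `list_objects_v2` via `get_paginator` is standard boto3.

# Hypothetical helper sketching the "partial sync" idea from the TODO above.
# Reuses the `s3` client and BUCKET_NAME defined in deploy.py; `prefix` is illustrative.
def list_bucket_keys(prefix=''):
    # Paginate rather than relying on a single list_objects_v2 call (1000 keys per page)
    keys = []
    paginator = s3.get_paginator('list_objects_v2')
    for page in paginator.paginate(Bucket=BUCKET_NAME, Prefix=prefix):
        for obj in page.get('Contents', []):
            keys.append(obj['Key'])
    return keys

# Example: fetch only the keys needed for the current sync instead of the whole bucket
# (the 'electron/' prefix is a placeholder, not a path taken from the commit).
# needed_keys = list_bucket_keys('electron/')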