1
0
Fork 0
mirror of https://gitlab.com/futo-org/fcast.git synced 2025-06-24 21:25:23 +00:00

Initial commit of new updater

This commit is contained in:
Michael Hollister 2024-11-11 12:24:17 -06:00
parent 698c10f356
commit 869ac1433f
16 changed files with 952 additions and 295 deletions

View file

@ -95,7 +95,16 @@ module.exports = {
},
],
hooks: {
postPackage: async (config, packageResults) => {
readPackageJson: async (forgeConfig, packageJson) => {
packageJson.channel = process.env.FCAST_CHANNEL ? process.env.FCAST_CHANNEL : 'stable';
if (packageJson.channel !== 'stable') {
packageJson.channelVersion = process.env.FCAST_CHANNEL_VERSION ? process.env.FCAST_CHANNEL_VERSION : '1';
}
return packageJson;
},
postPackage: async (config, packageResults) => {
switch (packageResults.platform) {
case "darwin": {
let artifactName = `${APPLICATION_NAME}.app`;
@ -108,6 +117,7 @@ module.exports = {
break;
}
},
postMake: async (forgeConfig, makeResults) => {
for (const e of makeResults) {
// Standardize artifact output naming
@ -115,12 +125,12 @@ module.exports = {
case "win32": {
let artifactName = `${APPLICATION_NAME}-win32-${e.arch}-${e.packageJSON.version}.zip`;
if (fs.existsSync(`./out/make/zip/win32/${e.arch}/${artifactName}`)) {
fs.renameSync(`./out/make/zip/win32/${e.arch}/${artifactName}`, `./out/make/zip/win32/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-windows-${e.arch}.zip`);
fs.renameSync(`./out/make/zip/win32/${e.arch}/${artifactName}`, path.join(`./out/make/zip/win32/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'zip')));
}
artifactName = `${APPLICATION_NAME}.msi`;
if (fs.existsSync(`./out/make/wix/${e.arch}/${artifactName}`)) {
fs.renameSync(`./out/make/wix/${e.arch}/${artifactName}`, `./out/make/wix/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-windows-${e.arch}-setup.msi`);
fs.renameSync(`./out/make/wix/${e.arch}/${artifactName}`, path.join(`./out/make/wix/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'msi')));
}
break;
@ -129,12 +139,11 @@ module.exports = {
let artifactName = `${APPLICATION_TITLE}.dmg`;
if (fs.existsSync(`./out/make/${artifactName}`)) {
fs.mkdirSync(`./out/make/dmg/${e.arch}`, { recursive: true });
fs.renameSync(`./out/make/${artifactName}`, `./out/make/dmg/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-macOS-${e.arch}.dmg`);
fs.renameSync(`./out/make/${artifactName}`, path.join(`./out/make/dmg/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'dmg')));
}
console.log(`Making a zip distributable for ${e.platform}/${e.arch}`);
const zipName = `${APPLICATION_NAME}-${e.packageJSON.version}-macOS-${e.arch}.zip`;
const zipPath = path.resolve(process.cwd(), 'out', 'make', 'zip', e.platform, e.arch, zipName);
const zipPath = path.resolve(process.cwd(), 'out', 'make', 'zip', e.platform, e.arch, generateArtifactName(e.packageJSON, e.platform, e.arch, 'zip'));
exec(`mkdir -p ${path.dirname(zipPath)}`, execOutput);
exec(`cd out/${APPLICATION_NAME}-${e.platform}-${e.arch}; zip -r -y "${zipPath}" "${APPLICATION_TITLE}.app"`, execOutput);
@ -149,27 +158,27 @@ const zipName = `${APPLICATION_NAME}-${e.packageJSON.version}-macOS-${e.arch}.zi
// await extract(`./out/make/zip/linux/${e.arch}/${artifactName}`, { dir: `${process.cwd()}/out/make/zip/linux/${e.arch}/` });
// fs.chownSync(`${process.cwd()}/out/make/zip/linux/${e.arch}/${APPLICATION_NAME}-linux-${e.arch}/chrome-sandbox`, 0, 0);
// fs.chmodSync(`${process.cwd()}/out/make/zip/linux/${e.arch}/${APPLICATION_NAME}-linux-${e.arch}/chrome-sandbox`, 4755);
fs.renameSync(`./out/make/zip/linux/${e.arch}/${artifactName}`, `./out/make/zip/linux/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-linux-${e.arch}.zip`);
fs.renameSync(`./out/make/zip/linux/${e.arch}/${artifactName}`, path.join(`./out/make/zip/linux/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'zip')));
}
artifactName = `${APPLICATION_NAME}_${e.packageJSON.version}_amd64.deb`
if (fs.existsSync(`./out/make/deb/${e.arch}/${artifactName}`)) {
fs.renameSync(`./out/make/deb/${e.arch}/${artifactName}`, `./out/make/deb/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-linux-${e.arch}.deb`);
fs.renameSync(`./out/make/deb/${e.arch}/${artifactName}`, path.join(`./out/make/deb/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'deb')));
}
artifactName = `${APPLICATION_NAME}_${e.packageJSON.version}_arm64.deb`
if (fs.existsSync(`./out/make/deb/${e.arch}/${artifactName}`)) {
fs.renameSync(`./out/make/deb/${e.arch}/${artifactName}`, `./out/make/deb/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-linux-${e.arch}.deb`);
fs.renameSync(`./out/make/deb/${e.arch}/${artifactName}`, path.join(`./out/make/deb/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'deb')));
}
artifactName = `${APPLICATION_NAME}-${e.packageJSON.version}-1.x86_64.rpm`
if (fs.existsSync(`./out/make/rpm/${e.arch}/${artifactName}`)) {
fs.renameSync(`./out/make/rpm/${e.arch}/${artifactName}`, `./out/make/rpm/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-linux-${e.arch}.rpm`);
fs.renameSync(`./out/make/rpm/${e.arch}/${artifactName}`, path.join(`./out/make/rpm/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'rpm')));
}
artifactName = `${APPLICATION_NAME}-${e.packageJSON.version}-1.arm64.rpm`
if (fs.existsSync(`./out/make/rpm/${e.arch}/${artifactName}`)) {
fs.renameSync(`./out/make/rpm/${e.arch}/${artifactName}`, `./out/make/rpm/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-linux-${e.arch}.rpm`);
fs.renameSync(`./out/make/rpm/${e.arch}/${artifactName}`, path.join(`./out/make/rpm/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'rpm')));
}
break;
@ -205,3 +214,26 @@ function execOutput(err, stdout, stderr) {
}
console.log(stdout);
}
// Maps a Node/Electron platform identifier to the OS label used in artifact file names.
// Unrecognized platforms (e.g. 'linux') pass through unchanged.
function getArtifactOS(platform) {
    const platformLabels = {
        win32: 'windows',
        darwin: 'macOS',
    };
    return platform in platformLabels ? platformLabels[platform] : platform;
}
function generateArtifactName(packageJSON, platform, arch, extension) {
let artifactName = `${APPLICATION_NAME}-${packageJSON.version}-${getArtifactOS(platform)}-${arch}`;
if (extension === 'msi') {
artifactName += '-setup';
}
if (packageJSON.channel !== 'stable') {
artifactName += `-${packageJSON.channel}-${packageJSON.channel_version}`;
}
artifactName += `.${extension}`
return artifactName;
}

View file

@ -1,20 +1,22 @@
{
"name": "fcast-receiver",
"version": "1.9.0",
"version": "2.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "fcast-receiver",
"version": "1.9.0",
"version": "2.0.0",
"license": "MIT",
"dependencies": {
"bufferutil": "^4.0.8",
"dashjs": "^4.7.4",
"electron-json-storage": "^4.6.0",
"extract-zip": "^2.0.1",
"hls.js": "^1.5.15",
"http": "^0.0.1-security",
"https": "^1.0.0",
"log4js": "^6.9.1",
"qrcode": "^1.5.3",
"update-electron-app": "^3.0.0",
"url": "^0.11.3",
@ -34,6 +36,7 @@
"@electron/fuses": "^1.8.0",
"@eslint/js": "^9.10.0",
"@futo/forge-maker-wix-linux": "^7.5.0",
"@types/electron-json-storage": "^4.5.4",
"@types/jest": "^29.5.11",
"@types/mdns": "^0.0.38",
"@types/node-forge": "^1.3.10",
@ -2613,6 +2616,13 @@
"@types/responselike": "^1.0.0"
}
},
"node_modules/@types/electron-json-storage": {
"version": "4.5.4",
"resolved": "https://registry.npmjs.org/@types/electron-json-storage/-/electron-json-storage-4.5.4.tgz",
"integrity": "sha512-M0ZzFYFOFejheqF900ux/CyzKC6Gex28rfeg22+MKxR9E82wU37bWrgboe8KHD63igpORmVjjmsZo6qDfkxdQQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/estree": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
@ -3677,7 +3687,6 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
"dev": true,
"license": "MIT"
},
"node_modules/base32-encode": {
@ -3791,7 +3800,6 @@
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dev": true,
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0",
@ -4413,7 +4421,6 @@
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
"dev": true,
"license": "MIT"
},
"node_modules/convert-source-map": {
@ -4563,6 +4570,15 @@
"integrity": "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w==",
"license": "MIT"
},
"node_modules/date-format": {
"version": "4.0.14",
"resolved": "https://registry.npmjs.org/date-format/-/date-format-4.0.14.tgz",
"integrity": "sha512-39BOQLs9ZjKh0/patS9nrT8wc3ioX3/eA/zgbKNopnF2wCqJEoxywwwElATYvRsXdnOxA/OQeQoFZ3rFjVajhg==",
"license": "MIT",
"engines": {
"node": ">=4.0"
}
},
"node_modules/debug": {
"version": "4.3.7",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz",
@ -5274,6 +5290,65 @@
"node": ">=10"
}
},
"node_modules/electron-json-storage": {
"version": "4.6.0",
"resolved": "https://registry.npmjs.org/electron-json-storage/-/electron-json-storage-4.6.0.tgz",
"integrity": "sha512-gAgNsnA7tEtV9LzzOnZTyVIb3cQtCva+bEBVT5pbRGU8ZSZTVKPBrTxIAYjeVfdSjyNXgfb1mr/CZrOJgeHyqg==",
"license": "MIT",
"dependencies": {
"async": "^2.0.0",
"lockfile": "^1.0.4",
"lodash": "^4.0.1",
"mkdirp": "^0.5.1",
"rimraf": "^2.5.1",
"write-file-atomic": "^2.4.2"
}
},
"node_modules/electron-json-storage/node_modules/async": {
"version": "2.6.4",
"resolved": "https://registry.npmjs.org/async/-/async-2.6.4.tgz",
"integrity": "sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==",
"license": "MIT",
"dependencies": {
"lodash": "^4.17.14"
}
},
"node_modules/electron-json-storage/node_modules/mkdirp": {
"version": "0.5.6",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz",
"integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==",
"license": "MIT",
"dependencies": {
"minimist": "^1.2.6"
},
"bin": {
"mkdirp": "bin/cmd.js"
}
},
"node_modules/electron-json-storage/node_modules/rimraf": {
"version": "2.7.1",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
"integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
"deprecated": "Rimraf versions prior to v4 are no longer supported",
"license": "ISC",
"dependencies": {
"glob": "^7.1.3"
},
"bin": {
"rimraf": "bin.js"
}
},
"node_modules/electron-json-storage/node_modules/write-file-atomic": {
"version": "2.4.3",
"resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz",
"integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==",
"license": "ISC",
"dependencies": {
"graceful-fs": "^4.1.11",
"imurmurhash": "^0.1.4",
"signal-exit": "^3.0.2"
}
},
"node_modules/electron-to-chromium": {
"version": "1.5.41",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.41.tgz",
@ -6142,7 +6217,6 @@
"version": "3.3.1",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz",
"integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==",
"dev": true,
"license": "ISC"
},
"node_modules/flora-colossus": {
@ -6243,7 +6317,6 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
"dev": true,
"license": "ISC"
},
"node_modules/fsevents": {
@ -6464,7 +6537,6 @@
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"deprecated": "Glob versions prior to v9 are no longer supported",
"dev": true,
"license": "ISC",
"dependencies": {
"fs.realpath": "^1.0.0",
@ -6638,7 +6710,6 @@
"version": "4.2.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==",
"dev": true,
"license": "ISC"
},
"node_modules/graphemer": {
@ -6952,7 +7023,6 @@
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
"integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.8.19"
@ -6980,7 +7050,6 @@
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
"deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.",
"dev": true,
"license": "ISC",
"dependencies": {
"once": "^1.3.0",
@ -6991,7 +7060,6 @@
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
"dev": true,
"license": "ISC"
},
"node_modules/ini": {
@ -8344,11 +8412,19 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/lockfile": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/lockfile/-/lockfile-1.0.4.tgz",
"integrity": "sha512-cvbTwETRfsFh4nHsL1eGWapU1XFi5Ot9E85sWAwia7Y7EgB7vfqcZhTKZ+l7hCGxSPoushMv5GKhT5PdLv03WA==",
"license": "ISC",
"dependencies": {
"signal-exit": "^3.0.2"
}
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
"dev": true,
"license": "MIT"
},
"node_modules/lodash.get": {
@ -8467,6 +8543,22 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/log4js": {
"version": "6.9.1",
"resolved": "https://registry.npmjs.org/log4js/-/log4js-6.9.1.tgz",
"integrity": "sha512-1somDdy9sChrr9/f4UlzhdaGfDR2c/SaD2a4T7qEkG4jTS57/B3qmnjLYePwQ8cqWnUHZI0iAKxMBpCZICiZ2g==",
"license": "Apache-2.0",
"dependencies": {
"date-format": "^4.0.14",
"debug": "^4.3.4",
"flatted": "^3.2.7",
"rfdc": "^1.3.0",
"streamroller": "^3.1.5"
},
"engines": {
"node": ">=8.0"
}
},
"node_modules/lowercase-keys": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz",
@ -8734,7 +8826,6 @@
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dev": true,
"license": "ISC",
"dependencies": {
"brace-expansion": "^1.1.7"
@ -8747,7 +8838,6 @@
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
"integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/ljharb"
@ -9395,7 +9485,6 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
@ -10351,7 +10440,6 @@
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz",
"integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==",
"dev": true,
"license": "MIT"
},
"node_modules/rimraf": {
@ -10620,7 +10708,6 @@
"version": "3.0.7",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
"integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
"dev": true,
"license": "ISC"
},
"node_modules/sisteransi": {
@ -10822,6 +10909,52 @@
"node": ">= 0.10.0"
}
},
"node_modules/streamroller": {
"version": "3.1.5",
"resolved": "https://registry.npmjs.org/streamroller/-/streamroller-3.1.5.tgz",
"integrity": "sha512-KFxaM7XT+irxvdqSP1LGLgNWbYN7ay5owZ3r/8t77p+EtSUAfUgtl7be3xtqtOmGUl9K9YPO2ca8133RlTjvKw==",
"license": "MIT",
"dependencies": {
"date-format": "^4.0.14",
"debug": "^4.3.4",
"fs-extra": "^8.1.0"
},
"engines": {
"node": ">=8.0"
}
},
"node_modules/streamroller/node_modules/fs-extra": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz",
"integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==",
"license": "MIT",
"dependencies": {
"graceful-fs": "^4.2.0",
"jsonfile": "^4.0.0",
"universalify": "^0.1.0"
},
"engines": {
"node": ">=6 <7 || >=8"
}
},
"node_modules/streamroller/node_modules/jsonfile": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
"integrity": "sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==",
"license": "MIT",
"optionalDependencies": {
"graceful-fs": "^4.1.6"
}
},
"node_modules/streamroller/node_modules/universalify": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
"license": "MIT",
"engines": {
"node": ">= 4.0.0"
}
},
"node_modules/string_decoder": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",

View file

@ -24,6 +24,7 @@
"@electron/fuses": "^1.8.0",
"@eslint/js": "^9.10.0",
"@futo/forge-maker-wix-linux": "^7.5.0",
"@types/electron-json-storage": "^4.5.4",
"@types/jest": "^29.5.11",
"@types/mdns": "^0.0.38",
"@types/node-forge": "^1.3.10",
@ -46,10 +47,12 @@
"dependencies": {
"bufferutil": "^4.0.8",
"dashjs": "^4.7.4",
"electron-json-storage": "^4.6.0",
"extract-zip": "^2.0.1",
"hls.js": "^1.5.15",
"http": "^0.0.1-security",
"https": "^1.0.0",
"log4js": "^6.9.1",
"qrcode": "^1.5.3",
"update-electron-app": "^3.0.0",
"url": "^0.11.3",

View file

@ -1,150 +0,0 @@
import os
import hashlib
import boto3
from botocore.client import Config
import shutil
from functools import cmp_to_key
ACCOUNT_ID = os.environ.get('R2_ACCOUNT_ID')
ACCESS_KEY_ID = os.environ.get('R2_ACCESS_KEY_ID')
SECRET_ACCESS_KEY = os.environ.get('R2_SECRET_ACCESS_KEY')
BUCKET_NAME = os.environ.get('R2_BUCKET_NAME')
DEPLOY_DIR = os.environ.get('FCAST_DO_RUNNER_DEPLOY_DIR')
TEMP_DIR = os.path.join(DEPLOY_DIR, 'temp')
LOCAL_CACHE_DIR = os.path.join(DEPLOY_DIR, 'cache')
# Customizable CI parameters
CACHE_VERSION_AMOUNT = int(os.environ.get('CACHE_VERSION_AMOUNT', default="-1"))
RELEASE_CANDIDATE = bool(os.environ.get('RELEASE_CANDIDATE', default=False))
RELEASE_CANDIDATE_VERSION = int(os.environ.get('RELEASE_CANDIDATE_VERSION', default="1"))
# Utility functions
def compare_versions(x, y):
x_parts = x.split('.')
y_parts = y.split('.')
for i in range(len(x_parts)):
if x_parts[i] < y_parts[i]:
return -1
elif x_parts[i] > y_parts[i]:
return 1
return 0
# Initial setup
# Note: Cloudflare R2 docs outdated, secret is not supposed to be hashed...
# Hash the secret access key using SHA-256
#hashed_secret_key = hashlib.sha256(SECRET_ACCESS_KEY.encode()).hexdigest()
# Configure the S3 client for Cloudflare R2
s3 = boto3.client('s3',
endpoint_url=f'https://{ACCOUNT_ID}.r2.cloudflarestorage.com',
aws_access_key_id=ACCESS_KEY_ID,
# aws_secret_access_key=hashed_secret_key,
aws_secret_access_key=SECRET_ACCESS_KEY,
config=Config(
signature_version='s3v4'
)
)
list_response = s3.list_objects_v2(Bucket=BUCKET_NAME, Prefix='electron/')
bucket_files = list_response.get('Contents', [])
bucket_versions_full = sorted(set(map(lambda x: x['Key'].split('/')[1], bucket_files)), key=cmp_to_key(compare_versions), reverse=True)
bucket_versions = bucket_versions_full if CACHE_VERSION_AMOUNT < 0 else bucket_versions_full[:CACHE_VERSION_AMOUNT]
os.makedirs(TEMP_DIR, exist_ok=True)
# CI functions
def copy_artifacts_to_local_cache():
if len(os.listdir('/artifacts')) == 0:
print('No artifacts were built...')
return None
print('Copying artifacts to cache...')
# All artifact should have same version in format: /artifacts/PKG/OS/ARCH/fcast-receiver-VERSION-OS-ARCH.PKG
version = os.listdir('/artifacts/zip/linux/x64')[0].split('-')[2]
dst = os.path.join(TEMP_DIR, version)
print(f'Current app version: {version}')
shutil.copytree('/artifacts', dst, dirs_exist_ok=True, ignore=shutil.ignore_patterns('*.w*'))
for dir in os.listdir('/artifacts'):
shutil.rmtree(os.path.join('/artifacts', dir))
return version
def sync_local_cache():
print('Syncing local cache with s3...')
local_files = []
for root, _, files in os.walk(LOCAL_CACHE_DIR):
for filename in files:
rel_path = os.path.relpath(os.path.join(root, filename), LOCAL_CACHE_DIR)
version = os.path.relpath(rel_path, 'electron/').split('/')[0]
if version in bucket_versions:
local_files.append(rel_path)
else:
print(f'Purging file from local cache: {rel_path}')
os.remove(os.path.join(root, filename))
for obj in bucket_files:
filename = obj['Key']
save_path = os.path.join(LOCAL_CACHE_DIR, filename)
if filename not in local_files:
print(f'Downloading file: {filename}')
get_response = s3.get_object(Bucket=BUCKET_NAME, Key=filename)
os.makedirs(os.path.dirname(save_path), exist_ok=True)
with open(save_path, 'wb') as file:
file.write(get_response['Body'].read())
def upload_local_cache(current_version):
print('Uploading local cache to s3...')
shutil.copytree(TEMP_DIR, os.path.join(LOCAL_CACHE_DIR, 'electron'), dirs_exist_ok=True)
local_files = []
for root, _, files in os.walk(LOCAL_CACHE_DIR):
for filename in files:
full_path = os.path.join(root, filename)
rel_path = os.path.relpath(full_path, LOCAL_CACHE_DIR)
version = rel_path.split('/')[1]
if RELEASE_CANDIDATE and version == current_version:
rc_path = full_path[:full_path.rfind('.')] + f'-rc{RELEASE_CANDIDATE_VERSION}' + full_path[full_path.rfind('.'):]
os.rename(full_path, rc_path)
rel_path = os.path.relpath(rc_path, LOCAL_CACHE_DIR)
local_files.append(rel_path)
for file_path in local_files:
if file_path not in map(lambda x: x['Key'], bucket_files):
print(f'Uploading file: {file_path}')
with open(os.path.join(LOCAL_CACHE_DIR, file_path), 'rb') as file:
put_response = s3.put_object(
Body=file,
Bucket=BUCKET_NAME,
Key=file_path,
)
def generate_delta_updates(current_version):
pass
# generate html previous version browsing (based off of bucket + and local if does not have all files)
def generate_previous_releases_page():
pass
def update_website():
pass
# CI Operations
current_version = copy_artifacts_to_local_cache()
sync_local_cache()
# generate_delta_updates(current_version)
upload_local_cache(current_version)
# generate_previous_releases_page()
# update_website()
shutil.rmtree(TEMP_DIR)

View file

@ -0,0 +1 @@
from .util import *

View file

@ -0,0 +1,252 @@
import os
import hashlib
import json
import shutil
from functools import cmp_to_key
from util import BUCKET_NAME, S3Client, PackageFormat, ArtifactVersion, compare_versions, generate_update_tarball
DEPLOY_DIR = os.environ.get('FCAST_DO_RUNNER_DEPLOY_DIR')
TEMP_DIR = os.path.join(DEPLOY_DIR, 'temp')
LOCAL_CACHE_DIR = os.path.join(DEPLOY_DIR, 'cache')
BASE_DOWNLOAD_URL = BUCKET_NAME.replace('-', '.')
EXCLUDED_DELTA_VERSIONS = ["1.0.14"]
# Version tracking for migration support
RELEASES_JSON_VERSION = '1'
# Customizable CI parameters
CACHE_VERSION_AMOUNT = int(os.environ.get('CACHE_VERSION_AMOUNT', default="-1"))
s3 = S3Client(CACHE_VERSION_AMOUNT, EXCLUDED_DELTA_VERSIONS)
# CI functions
def ensure_files_exist(dirs, files):
    """Create required directories and fetch missing cache files from s3.

    dirs: directory paths, created if absent.
    files: paths relative to LOCAL_CACHE_DIR, downloaded when not cached locally.
    """
    for directory in dirs:
        os.makedirs(directory, exist_ok=True)
    for relative_path in files:
        local_path = os.path.join(LOCAL_CACHE_DIR, relative_path)
        if not os.path.exists(local_path):
            s3.download_file(local_path, relative_path)
def copy_artifacts_to_local_cache():
    """Stage freshly built artifacts under TEMP_DIR and return their version.

    Falls back to the currently published stable version (read from the cached
    releases.json) when the build pipeline produced no artifacts.
    """
    releases_path = os.path.join(LOCAL_CACHE_DIR, 'electron', 'releases.json')
    with open(releases_path, 'r') as releases_file:
        releases = json.load(releases_file)
        version = ArtifactVersion(releases['currentVersion'], 'stable', None)

    if len(os.listdir('/artifacts')) == 0:
        print('No artifacts were built...')
        return version

    print('Copying artifacts to cache...')
    # Picking a random package that exists from the build pipeline
    sample_artifact = PackageFormat(os.listdir('/artifacts/zip/linux/x64')[0])
    version = ArtifactVersion(sample_artifact.version, sample_artifact.channel, sample_artifact.channel_version)

    staging_dir = os.path.join(TEMP_DIR, version.version)
    shutil.copytree('/artifacts', staging_dir, dirs_exist_ok=True, ignore=shutil.ignore_patterns('*.w*'))
    for entry in os.listdir('/artifacts'):
        shutil.rmtree(os.path.join('/artifacts', entry))

    print(f'Current app version: {version}')
    return version
def sync_local_cache():
    """Mirror the s3 bucket into LOCAL_CACHE_DIR.

    Purges cached files belonging to versions no longer tracked by s3
    (releases.json is always kept) and downloads bucket objects missing locally.
    """
    print('Syncing local cache with s3...')
    cached_files = []
    tracked_versions = s3.get_versions()
    for root, _, files in os.walk(LOCAL_CACHE_DIR):
        for filename in files:
            rel_path = os.path.relpath(os.path.join(root, filename), LOCAL_CACHE_DIR)
            version = os.path.relpath(rel_path, 'electron/').split('/')[0]
            if version in tracked_versions or filename == 'releases.json':
                cached_files.append(rel_path)
            else:
                print(f'Purging file from local cache: {rel_path}')
                os.remove(os.path.join(root, filename))
    for obj in s3.get_bucket_files():
        key = obj['Key']
        if key not in cached_files:
            s3.download_file(os.path.join(LOCAL_CACHE_DIR, key), key)
def upload_local_cache():
    """Push newly staged files (and the always-refreshed releases.json) to s3."""
    print('Uploading local cache to s3...')
    # Merge the staged artifacts into the local cache before scanning it.
    shutil.copytree(TEMP_DIR, os.path.join(LOCAL_CACHE_DIR, 'electron'), dirs_exist_ok=True)

    cached_paths = [
        os.path.relpath(os.path.join(root, filename), LOCAL_CACHE_DIR)
        for root, _, files in os.walk(LOCAL_CACHE_DIR)
        for filename in files
    ]
    remote_keys = [obj['Key'] for obj in s3.get_bucket_files()]
    for rel_path in cached_paths:
        # releases.json is re-uploaded every run since its contents change.
        if rel_path not in remote_keys or os.path.basename(rel_path) == 'releases.json':
            s3.upload_file(os.path.join(LOCAL_CACHE_DIR, rel_path), rel_path)
# TODO: WIP
def generate_delta_updates(artifact_version):
    """Prepare bsdiff delta patches between the new artifacts and previously
    published zip updates.

    Returns a nested dict keyed channel -> os -> arch -> version mapping to
    {'path': update tarball path, 'digest': its sha digest}.
    NOTE(review): patch creation itself is currently stubbed out — the bsdiff
    command is only printed, never executed.
    """
    delta_info = {}
    releases = None
    with open(os.path.join(LOCAL_CACHE_DIR, 'electron', 'releases.json') , 'r') as file:
        releases = json.load(file)

    # Get sha digest from base version for integrity validation
    print('Generating sha digests from previous updates...')
    for root, _, files in os.walk(LOCAL_CACHE_DIR):
        for filename in filter(lambda f: f.endswith('.zip'), files):
            full_path = os.path.join(root, filename)
            rel_path = os.path.relpath(full_path, os.path.join(LOCAL_CACHE_DIR, 'electron'))
            package = PackageFormat(rel_path)
            # Only same-channel packages can serve as delta bases; skip excluded versions.
            if package.channel != artifact_version.channel or package.version in EXCLUDED_DELTA_VERSIONS:
                continue
            print(f'Generating sha digests from: {full_path}')
            artifact_name, digest = generate_update_tarball(full_path, rel_path, TEMP_DIR, package)
            print(f'DIGEST INFO: {artifact_name} {digest}')
            # Insert the tarball path/digest into the nested channel/os/arch/version map.
            os_dict = delta_info.get(package.channel, {})
            arch_dict = os_dict.get(package.os, {})
            version_dict = arch_dict.get(package.arch, {})
            delta_entry = {
                'path': os.path.join(TEMP_DIR, os.path.dirname(rel_path), artifact_name),
                'digest': digest,
            }
            version_dict[package.version] = delta_entry
            arch_dict[package.arch] = version_dict
            os_dict[package.os] = arch_dict
            delta_info[package.channel] = os_dict

    # TODO: Add limit on amount of delta patches to create (either fixed number or by memory savings)
    # TODO: Parallelize bsdiff invocation since its single-threaded, provided enough RAM available
    print('Generating delta updates...')
    previous_versions = filter(lambda v: v not in EXCLUDED_DELTA_VERSIONS, releases['previousVersions'])
    for delta_version in previous_versions:
        # Create delta patches
        for root, _, files in os.walk(TEMP_DIR):
            for filename in filter(lambda f: f.endswith('.zip'), files):
                full_path = os.path.join(root, filename)
                rel_path = os.path.relpath(full_path, TEMP_DIR)
                package = PackageFormat(rel_path)
                if package.version in EXCLUDED_DELTA_VERSIONS:
                    continue
                artifact_name, digest = generate_update_tarball(full_path, rel_path, TEMP_DIR, package)
                base_file = delta_info[package.channel][package.os][package.arch][delta_version]['path']
                new_file = os.path.join(os.path.dirname(full_path), artifact_name)
                delta_file = os.path.join(os.path.dirname(full_path), f'{package.name}-{package.version}-{package.os_pretty}-{package.arch}-delta-{delta_version}.delta')
                command = f'bsdiff {base_file} {new_file} {delta_file}'
                print(f'temp skipping delta generation: {command}')
                # print(f'Generating delta update: {command}')
                # os.system(command)
                # os.remove(base_file)
                # os.remove(new_file)
    return delta_info
def generate_releases_json(artifact_version, delta_info):
    """Regenerate the cached releases.json from the staged artifacts.

    Records download URLs and sha256 digests for each zip/delta package found
    under TEMP_DIR, updates channel bookkeeping, and bumps `currentVersion`
    when the staged stable build is newer than the published one.

    artifact_version: ArtifactVersion of the staged build.
    delta_info: nested dict from generate_delta_updates() (may be empty).
    """
    print('Generating releases.json...')
    releases_path = os.path.join(LOCAL_CACHE_DIR, 'electron', 'releases.json')
    with open(releases_path, 'r') as file:
        releases = json.load(file)

    current_version = releases.get('currentVersion', '0.0.0')
    current_releases = releases.get('currentReleases', {})
    channel_current_versions = releases.get('channelCurrentVersions', {})
    all_versions = releases.get('allVersions', [])
    if current_version not in all_versions:
        all_versions.append(current_version)

    for root, _, files in os.walk(TEMP_DIR):
        # Only offer zip and delta updates. Other packages will update from zip packages
        for filename in filter(lambda f: f.endswith('.zip') or f.endswith('.delta'), files):
            full_path = os.path.join(root, filename)
            rel_path = os.path.relpath(full_path, TEMP_DIR)
            package = PackageFormat(rel_path)
            url = f'https://{BASE_DOWNLOAD_URL}/electron/{rel_path}'
            with open(full_path, 'rb') as file:
                digest = hashlib.sha256(file.read()).hexdigest()

            os_dict = current_releases.get(package.channel, {})
            arch_dict = os_dict.get(package.os, {})
            entry_dict = arch_dict.get(package.arch, {})
            if package.is_delta:
                delta_dict = entry_dict.get('deltas', {})
                delta_dict[package.delta_base_version] = {
                    'deltaUrl': url,
                    'sha256Digest': digest,
                    'baseVersion': package.delta_base_version,
                    'baseSha256Digest': delta_info[package.channel][package.os][package.arch][package.delta_base_version]['digest'],
                }
                entry_dict['deltas'] = delta_dict
            else:
                entry_dict['url'] = url
                entry_dict['sha256Digest'] = digest
            arch_dict[package.arch] = entry_dict
            os_dict[package.os] = arch_dict
            current_releases[package.channel] = os_dict
            if package.channel != 'stable':
                channel_current_versions[package.channel] = max(int(package.channel_version), channel_current_versions.get(package.channel, 0))

    # BUG FIX: previously the result of max(...) was used directly as the condition;
    # a non-empty version string is always truthy, so ANY stable build overwrote
    # currentVersion even when it was older than the published release. Compare the
    # max() winner against the artifact's version instead.
    newest_version = max([artifact_version.version, current_version], key=cmp_to_key(compare_versions))
    if artifact_version.channel == 'stable' and newest_version == artifact_version.version:
        releases['currentVersion'] = artifact_version.version
    else:
        releases['currentVersion'] = current_version

    releases['previousVersions'] = s3.get_versions(full=True)
    releases['fileVersion'] = RELEASES_JSON_VERSION
    releases['allVersions'] = all_versions
    releases['channelCurrentVersions'] = channel_current_versions
    releases['currentReleases'] = current_releases
    with open(releases_path, 'w') as file:
        json.dump(releases, file, indent=4)
def generate_previous_releases_page():
    # TODO(review): stub — previous-releases HTML page generation not implemented yet.
    pass

def update_website():
    # TODO(review): stub — website update step not implemented yet.
    pass
# CI Operations
# Ensure working directories and the cached releases.json exist before any
# step runs (releases.json is pulled from s3 when missing locally).
ensure_files_exist(dirs=[
        '/artifacts',
        DEPLOY_DIR,
        TEMP_DIR,
        LOCAL_CACHE_DIR,
        os.path.join(LOCAL_CACHE_DIR, 'electron')
    ],
    files=[
        os.path.join('electron', 'releases.json')
    ])
artifact_version = copy_artifacts_to_local_cache()
sync_local_cache()
# Disabling delta update generation for now...
# delta_info = generate_delta_updates(artifact_version)
delta_info = {}
generate_releases_json(artifact_version, delta_info)
upload_local_cache()
# generate_previous_releases_page()
# update_website()
print('Cleaning up...')
shutil.rmtree(TEMP_DIR)

View file

@ -0,0 +1,211 @@
import boto3
import hashlib
import os
import requests
import shutil
from botocore.client import Config
from collections import namedtuple
from functools import cmp_to_key
CLOUDFLARE_CACHE_TOKEN = os.environ.get('CLOUDFLARE_CACHE_TOKEN')
ZONE_ID = os.environ.get('CLOUDFLARE_ZONE_ID')
ACCOUNT_ID = os.environ.get('R2_ACCOUNT_ID')
ACCESS_KEY_ID = os.environ.get('R2_ACCESS_KEY_ID')
SECRET_ACCESS_KEY = os.environ.get('R2_SECRET_ACCESS_KEY')
BUCKET_NAME = os.environ.get('R2_BUCKET_NAME')
class S3Client:
    """Thin wrapper around a Cloudflare R2 bucket (S3-compatible API).

    On construction it lists everything under 'electron/' and derives the
    set of release versions present in the bucket.

    :param cache_version_amount: number of newest versions exposed by
        get_versions(); a negative value means "all versions".
    :param excluded_delta_versions: versions excluded from
        bucket_delta_versions (versions deltas must not be built from).
    """

    def __init__(self, cache_version_amount, excluded_delta_versions):
        # Note: Cloudflare R2 docs outdated, secret is not supposed to be hashed...
        # Hash the secret access key using SHA-256
        #hashed_secret_key = hashlib.sha256(SECRET_ACCESS_KEY.encode()).hexdigest()

        # Configure the S3 client for Cloudflare R2
        self.s3 = boto3.client('s3',
            endpoint_url=f'https://{ACCOUNT_ID}.r2.cloudflarestorage.com',
            aws_access_key_id=ACCESS_KEY_ID,
            # aws_secret_access_key=hashed_secret_key,
            aws_secret_access_key=SECRET_ACCESS_KEY,
            config=Config(
                signature_version='s3v4'
            )
        )

        # NOTE(review): list_objects_v2 returns at most 1000 keys per call and
        # this call is not paginated -- confirm the prefix stays under that limit.
        list_response = self.s3.list_objects_v2(Bucket=BUCKET_NAME, Prefix='electron/')
        self.bucket_files = list_response.get('Contents', [])

        # Keys look like 'electron/<version>/...'; collect the distinct version
        # directory names, sorted newest-first via compare_versions.
        bucket_files_versions = filter(lambda x: x['Key'] != 'electron/releases.json', self.bucket_files)
        self.bucket_versions_full = sorted(set(map(lambda x: x['Key'].split('/')[1], bucket_files_versions)), key=cmp_to_key(compare_versions), reverse=True)
        self.bucket_versions = self.bucket_versions_full if cache_version_amount < 0 else self.bucket_versions_full[:cache_version_amount]
        self.bucket_delta_versions = [v for v in self.bucket_versions if v not in excluded_delta_versions]

    def get_bucket_files(self):
        # Raw 'Contents' entries from the initial bucket listing.
        return self.bucket_files

    def get_versions(self, full=False):
        # full=True returns every version found; otherwise the capped list.
        return self.bucket_versions_full if full else self.bucket_versions

    def download_file(self, full_path, s3_path):
        """Download s3_path from the bucket to full_path, creating parent dirs."""
        print(f'Downloading file: {s3_path}')
        get_response = self.s3.get_object(Bucket=BUCKET_NAME, Key=s3_path)

        os.makedirs(os.path.dirname(full_path), exist_ok=True)
        with open(full_path, 'wb') as file:
            file.write(get_response['Body'].read())

    def upload_file(self, full_path, s3_path):
        """Upload full_path to s3_path, purging the Cloudflare CDN cache for
        the file's public URL first (best-effort: a failed purge is logged
        but does not abort the upload)."""
        print(f'Uploading file: {s3_path}')

        # Public URL domain is derived from the bucket name ('-' -> '.').
        domain = BUCKET_NAME.replace('-', '.')
        purge_response = requests.post(
            f'https://api.cloudflare.com/client/v4/zones/{ZONE_ID}/purge_cache',
            headers={
                'Authorization': f'Bearer {CLOUDFLARE_CACHE_TOKEN}',
                'Content-Type': 'application/json',
            },
            json={
                'files': [f'https://{domain}/{s3_path}']
            }
        )

        if purge_response.status_code != 200:
            print(f'Error while purging cache: {purge_response}')

        # NOTE(review): the put_object response is never checked -- verify an
        # upload failure surfaces somewhere (boto3 raises on most errors).
        with open(full_path, 'rb') as file:
            put_response = self.s3.put_object(
                Body=file,
                Bucket=BUCKET_NAME,
                Key=s3_path,
            )
# Utility types
class PackageFormat:
    """Parses an artifact path to extract package information

    Artifact format: ((VERSION)?/PKG/(OS/ARCH|ARCH)/)?fcast-receiver-VERSION-OS-ARCH(-setup|-delta-DELTA_BASE_VERSION)?(-CHANNEL-CHANNEL_VERSION)?.PKG
    """

    def __init__(self, path):
        # Fields default to None/False and are filled in only as far as the
        # path can be parsed.
        self.version = None           # semantic version string, e.g. '1.0.15'
        self.type = None              # package type: zip/wix/dmg/deb/rpm
        self.os = None                # electron platform id: win32/darwin/linux
        self.os_pretty = None         # human-readable OS name used in artifact names
        self.arch = None              # e.g. x64, arm64
        self.name = None
        self.is_delta = False
        self.delta_base_version = None
        self.channel = None           # 'stable' or a pre-release channel name
        self.channel_version = None

        dirs = path.split('/')
        file = path.split('-')
        # NOTE(review): the 'file' value above is dead -- it is re-assigned
        # below, and only when the path contains directories; a bare filename
        # is left unparsed. Confirm that is intended.
        self.name = 'fcast-receiver'

        if len(dirs) > 1:
            parse_index = 0

            # A leading component containing '.' is an explicit version directory.
            if dirs[parse_index].count('.') > 0:
                self.version = dirs[parse_index]
                self.type = dirs[parse_index + 1]
                parse_index += 2
            else:
                self.type = dirs[parse_index]
                parse_index += 1

            if self.type == 'zip':
                # zip artifacts are laid out as PKG/OS/ARCH/...
                self.os = dirs[parse_index]
                self.os_pretty = 'windows' if self.os == 'win32' else 'macOS' if self.os == 'darwin' else 'linux'
                self.arch = dirs[parse_index + 1]
                parse_index += 2
            else:
                # Installer artifacts are laid out as PKG/ARCH/...; the OS is
                # implied by the package type.
                if self.type == 'wix':
                    self.os = 'win32'
                    self.os_pretty = 'windows'
                    self.arch = dirs[parse_index]
                elif self.type == 'dmg':
                    self.os = 'darwin'
                    self.os_pretty = 'macOS'
                    self.arch = dirs[parse_index]
                elif self.type == 'deb' or self.type == 'rpm':
                    self.os = 'linux'
                    self.os_pretty = 'linux'
                    self.arch = dirs[parse_index]
                parse_index += 1

            # Unsupported package format (e.g. 1.0.14)
            if self.version == '1.0.14':
                return

            file = dirs[parse_index].split('-')
            self.version = file[2]

            # 'fcast-receiver-VERSION-OS-ARCH.EXT' splits into exactly 5 '-'
            # fields; anything past index 4 is a delta/setup/channel suffix.
            channel_index = 5
            if len(file) == channel_index:
                self.channel = 'stable'
                return

            if file[channel_index] == 'delta':
                self.is_delta = True
                self.delta_base_version = file[channel_index + 1].replace('.delta', '')
                channel_index += 2
            elif file[channel_index] == 'setup':
                channel_index += 1

            if len(file) > channel_index:
                self.channel = file[channel_index]
                # Strip the trailing file extension to get the channel version.
                version = file[channel_index + 1]
                self.channel_version = version[:version.rfind('.')]
            else:
                self.channel = 'stable'

    def packageNamePretty(self):
        # Human-facing artifact basename (no extension); pre-release channels
        # carry a '-CHANNEL-CHANNEL_VERSION' suffix.
        if self.channel != 'stable':
            return f'{self.name}-{self.version}-{self.os_pretty}-{self.arch}-{self.channel}-{self.channel_version}'
        else:
            return f'{self.name}-{self.version}-{self.os_pretty}-{self.arch}'

    def __str__(self) -> str:
        return f'''PackageFormat(type={self.type}, version={self.version}, os={self.os}, arch={self.arch},
            is_delta={self.is_delta}, delta_base_version={self.delta_base_version}, channel={self.channel},
            channel_version={self.channel_version})'''
# (version, channel, channel_version) triple describing the artifact a CI run produced.
ArtifactVersion = namedtuple('ArtifactVersion', ['version', 'channel', 'channel_version'])
# Utility functions
def compare_versions(x, y):
    """cmp-style comparator for dotted version strings, for use with
    functools.cmp_to_key.

    Returns -1 if x < y, 1 if x > y, and 0 if equal. Components are compared
    numerically, and a shorter version is padded with zeros (so '1.2' equals
    '1.2.0'). This fixes two defects in the previous implementation:

    * components were compared as strings, so '1.0.10' sorted below '1.0.9';
    * iterating over len(x_parts) raised IndexError whenever x had more
      components than y.
    """
    x_parts = [int(part) for part in x.split('.')]
    y_parts = [int(part) for part in y.split('.')]

    # Pad the shorter version with zeros so both have the same length.
    length = max(len(x_parts), len(y_parts))
    x_parts += [0] * (length - len(x_parts))
    y_parts += [0] * (length - len(y_parts))

    for a, b in zip(x_parts, y_parts):
        if a < b:
            return -1
        elif a > b:
            return 1

    return 0
def generate_update_tarball(full_path, rel_path, working_dir, package):
    """Repackage a downloaded zip artifact as an update tarball.

    Extracts the archive at ``full_path``, re-archives the application
    directory as '<packageNamePretty()>.tar' alongside the extraction
    location (mirroring ``rel_path`` under ``working_dir``), and removes the
    intermediate extracted files.

    Returns a ``(artifact_name, sha256_hex_digest)`` tuple for the tarball.
    """
    dest_dir = os.path.join(working_dir, os.path.dirname(rel_path))
    app_dir = f'{package.name}-{package.os}-{package.arch}'

    if package.os == 'darwin':
        # macOS zips contain the app files directly, so extract into a
        # dedicated subdirectory and archive that directory itself.
        staging_dir = os.path.join(dest_dir, app_dir)
        archive_root = staging_dir
    else:
        # Other platforms already wrap the app in '<name>-<os>-<arch>/'.
        staging_dir = dest_dir
        archive_root = os.path.join(dest_dir, app_dir)

    shutil.unpack_archive(full_path, staging_dir)

    if package.os == 'darwin':
        shutil.make_archive(os.path.join(dest_dir, package.packageNamePretty()), 'tar', archive_root)
        shutil.rmtree(staging_dir)
        staging_dir = dest_dir
    else:
        shutil.make_archive(os.path.join(staging_dir, package.packageNamePretty()), 'tar', archive_root)
        shutil.rmtree(archive_root)

    artifact_name = f'{package.packageNamePretty()}.tar'
    with open(os.path.join(staging_dir, artifact_name), 'rb') as file:
        digest = hashlib.sha256(file.read()).hexdigest()

    return artifact_name, digest

View file

@ -1,4 +1,4 @@
import { app } from 'electron';
import Main from './Main';
Main.main(app);
await Main.main(app);

View file

@ -1,6 +1,8 @@
import mdns from 'mdns-js';
import * as log4js from "log4js";
const cp = require('child_process');
const os = require('os');
const logger = log4js.getLogger();
export class DiscoveryService {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
@ -22,15 +24,15 @@ export class DiscoveryService {
hostname = os.hostname();
}
catch (err) {
console.warn('Error fetching hostname, trying different method...');
console.warn(err);
logger.warn('Error fetching hostname, trying different method...');
logger.warn(err);
try {
hostname = cp.execSync("hostnamectl hostname").toString().trim();
}
catch (err2) {
console.warn('Error fetching hostname again, using generic name...');
console.warn(err2);
logger.warn('Error fetching hostname again, using generic name...');
logger.warn(err2);
hostname = 'linux device';
}
@ -50,7 +52,7 @@ export class DiscoveryService {
}
const name = `FCast-${DiscoveryService.getComputerName()}`;
console.log("Discovery service started.", name);
logger.info("Discovery service started.", name);
this.serviceTcp = mdns.createAdvertisement(mdns.tcp('_fcast'), 46899, { name: name });
this.serviceTcp.start();

View file

@ -1,7 +1,9 @@
import net = require('net');
import * as net from 'net';
import * as log4js from "log4js";
import { EventEmitter } from 'node:events';
import { PlaybackErrorMessage, PlaybackUpdateMessage, PlayMessage, SeekMessage, SetSpeedMessage, SetVolumeMessage, VersionMessage, VolumeUpdateMessage } from './Packets';
import { WebSocket } from 'ws';
const logger = log4js.getLogger();
enum SessionState {
Idle = 0,
@ -47,7 +49,7 @@ export class FCastSession {
send(opcode: number, message = null) {
const json = message ? JSON.stringify(message) : null;
console.log(`send (opcode: ${opcode}, body: ${json})`);
logger.info(`send (opcode: ${opcode}, body: ${json})`);
let data: Uint8Array;
if (json) {
@ -87,7 +89,7 @@ export class FCastSession {
return;
}
console.log(`${receivedBytes.length} bytes received`);
logger.info(`${receivedBytes.length} bytes received`);
switch (this.state) {
case SessionState.WaitingForLength:
@ -97,7 +99,7 @@ export class FCastSession {
this.handlePacketBytes(receivedBytes);
break;
default:
console.log(`Data received is unhandled in current session state ${this.state}.`);
logger.info(`Data received is unhandled in current session state ${this.state}.`);
break;
}
}
@ -108,20 +110,20 @@ export class FCastSession {
receivedBytes.copy(this.buffer, this.bytesRead, 0, bytesToRead);
this.bytesRead += bytesToRead;
console.log(`handleLengthBytes: Read ${bytesToRead} bytes from packet`);
logger.info(`handleLengthBytes: Read ${bytesToRead} bytes from packet`);
if (this.bytesRead >= LENGTH_BYTES) {
this.state = SessionState.WaitingForData;
this.packetLength = this.buffer.readUInt32LE(0);
this.bytesRead = 0;
console.log(`Packet length header received from: ${this.packetLength}`);
logger.info(`Packet length header received from: ${this.packetLength}`);
if (this.packetLength > MAXIMUM_PACKET_LENGTH) {
throw new Error(`Maximum packet length is 32kB: ${this.packetLength}`);
}
if (bytesRemaining > 0) {
console.log(`${bytesRemaining} remaining bytes pushed to handlePacketBytes`);
logger.info(`${bytesRemaining} remaining bytes pushed to handlePacketBytes`);
this.handlePacketBytes(receivedBytes.slice(bytesToRead));
}
}
@ -133,10 +135,10 @@ export class FCastSession {
receivedBytes.copy(this.buffer, this.bytesRead, 0, bytesToRead);
this.bytesRead += bytesToRead;
console.log(`handlePacketBytes: Read ${bytesToRead} bytes from packet`);
logger.info(`handlePacketBytes: Read ${bytesToRead} bytes from packet`);
if (this.bytesRead >= this.packetLength) {
console.log(`Packet finished receiving from of ${this.packetLength} bytes.`);
logger.info(`Packet finished receiving from of ${this.packetLength} bytes.`);
this.handleNextPacket();
this.state = SessionState.WaitingForLength;
@ -144,14 +146,14 @@ export class FCastSession {
this.bytesRead = 0;
if (bytesRemaining > 0) {
console.log(`${bytesRemaining} remaining bytes pushed to handleLengthBytes`);
logger.info(`${bytesRemaining} remaining bytes pushed to handleLengthBytes`);
this.handleLengthBytes(receivedBytes.slice(bytesToRead));
}
}
}
private handlePacket(opcode: number, body: string | undefined) {
console.log(`handlePacket (opcode: ${opcode}, body: ${body})`);
logger.info(`handlePacket (opcode: ${opcode}, body: ${body})`);
try {
switch (opcode) {
@ -181,16 +183,16 @@ export class FCastSession {
break;
}
} catch (e) {
console.warn(`Error handling packet from.`, e);
logger.warn(`Error handling packet from.`, e);
}
}
private handleNextPacket() {
console.log(`Processing packet of ${this.bytesRead} bytes from`);
logger.info(`Processing packet of ${this.bytesRead} bytes from`);
const opcode = this.buffer[0];
const body = this.packetLength > 1 ? this.buffer.toString('utf8', 1, this.packetLength) : null;
console.log('body', body);
logger.info('body', body);
this.handlePacket(opcode, body);
}

View file

@ -9,6 +9,7 @@ import * as os from 'os';
import * as path from 'path';
import * as http from 'http';
import * as url from 'url';
import * as log4js from "log4js";
import { AddressInfo } from 'ws';
import { v4 as uuidv4 } from 'uuid';
import yargs from 'yargs';
@ -29,6 +30,7 @@ export default class Main {
static proxyServer: http.Server;
static proxyServerAddress: AddressInfo;
static proxiedFiles: Map<string, { url: string, headers: { [key: string]: string } }> = new Map();
static logger: log4js.Logger;
private static toggleMainWindow() {
if (Main.mainWindow) {
@ -52,22 +54,18 @@ export default class Main {
label: 'Check for updates',
click: async () => {
try {
const updater = new Updater(path.join(__dirname, '../'), 'https://releases.grayjay.app/fcastreceiver');
if (await updater.update()) {
if (await Updater.update()) {
const restartPrompt = await dialog.showMessageBox({
type: 'info',
title: 'Update completed',
message: 'The application has been updated. Restart now to apply the changes.',
title: 'Update ready',
message: 'Update downloaded, restart now to apply the changes.',
buttons: ['Restart'],
defaultId: 0
});
console.log('Update completed');
// Restart the app if the user clicks the 'Restart' button
if (restartPrompt.response === 0) {
Main.application.relaunch();
Main.application.exit(0);
await Updater.processUpdate();
}
} else {
await dialog.showMessageBox({
@ -82,12 +80,12 @@ export default class Main {
await dialog.showMessageBox({
type: 'error',
title: 'Failed to update',
message: 'The application failed to update.',
message: err,
buttons: ['OK'],
defaultId: 0
});
console.error('Failed to update:', err);
Main.logger.error('Failed to update:', err);
}
},
},
@ -98,7 +96,7 @@ export default class Main {
label: 'Restart',
click: () => {
this.application.relaunch();
this.application.exit(0);
this.application.exit();
}
},
{
@ -220,11 +218,11 @@ export default class Main {
private static setupProxyServer(): Promise<void> {
return new Promise<void>((resolve, reject) => {
try {
console.log(`Proxy server starting`);
Main.logger.info(`Proxy server starting`);
const port = 0;
Main.proxyServer = http.createServer((req, res) => {
console.log(`Request received`);
Main.logger.info(`Request received`);
const requestUrl = `http://${req.headers.host}${req.url}`;
const proxyInfo = Main.proxiedFiles.get(requestUrl);
@ -265,7 +263,7 @@ export default class Main {
req.pipe(proxyReq, { end: true });
proxyReq.on('error', (e) => {
console.error(`Problem with request: ${e.message}`);
Main.logger.error(`Problem with request: ${e.message}`);
res.writeHead(500);
res.end();
});
@ -275,7 +273,7 @@ export default class Main {
});
Main.proxyServer.listen(port, '127.0.0.1', () => {
Main.proxyServerAddress = Main.proxyServer.address() as AddressInfo;
console.log(`Proxy server running at http://127.0.0.1:${Main.proxyServerAddress.port}/`);
Main.logger.info(`Proxy server running at http://127.0.0.1:${Main.proxyServerAddress.port}/`);
resolve();
});
} catch (e) {
@ -303,7 +301,7 @@ export default class Main {
}
const proxiedUrl = `http://127.0.0.1:${Main.proxyServerAddress.port}/${uuidv4()}`;
console.log("Proxied url", { proxiedUrl, url, headers });
Main.logger.info("Proxied url", { proxiedUrl, url, headers });
Main.proxiedFiles.set(proxiedUrl, { url: url, headers: headers });
return proxiedUrl;
}
@ -356,9 +354,27 @@ export default class Main {
});
}
static main(app: Electron.App) {
static async main(app: Electron.App) {
Main.application = app;
const argv = yargs(hideBin(process.argv))
const fileLogType = Updater.isUpdating() ? 'fileSync' : 'file';
log4js.configure({
appenders: {
out: { type: 'stdout' },
log: { type: fileLogType, filename: path.join(app.getPath('logs'), 'fcast-receiver.log'), flags: 'w' },
},
categories: {
default: { appenders: ['out', 'log'], level: 'info' },
},
});
Main.logger = log4js.getLogger();
Main.logger.info(`Starting application: ${app.name} (${app.getVersion()}) | ${app.getAppPath()}`);
if (Updater.isUpdating()) {
await Updater.processUpdate();
}
const argv = yargs(hideBin(process.argv))
.parserConfiguration({
'boolean-negation': false
})

View file

@ -0,0 +1,35 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import storage from 'electron-json-storage';
import { app } from 'electron';
import * as log4js from "log4js";
const logger = log4js.getLogger();
/**
 * Static wrapper around electron-json-storage. All user settings live in a
 * single 'UserSettings' JSON document that is read once at class load and
 * then served from an in-memory cache; writes update the cache synchronously
 * and persist the whole document asynchronously (fire-and-forget).
 */
export class Store {
    private static storeVersion = 1;
    private static userSettings = 'UserSettings';
    private static settingsCache: any = null;

    static {
        // Point storage at the per-user data directory before the first read.
        storage.setDataPath(app.getPath('userData'));
        Store.settingsCache = storage.getSync(Store.userSettings);

        // First run: stamp the settings document with its schema version.
        if (Store.get('storeVersion') === null) {
            Store.set('storeVersion', Store.storeVersion);
        }
    }

    /** Returns the cached value for `key`, or null when absent/nullish. */
    public static get(key: string): any {
        const cached = Store.settingsCache[key];
        return cached ?? null;
    }

    /** Updates the cache, then persists the full settings document to disk. */
    public static set(key: string, value: any) {
        Store.settingsCache[key] = value;
        logger.info(`Writing settings file: key '${key}', value ${JSON.stringify(value)}`);

        storage.set(Store.userSettings, Store.settingsCache, (error) => {
            if (error) {
                logger.error(`Error writing user settings: ${error}`);
            }
        });
    }
}

View file

@ -1,4 +1,4 @@
import net = require('net');
import * as net from 'net';
import { FCastSession, Opcode } from './FCastSession';
import { EventEmitter } from 'node:events';
import { dialog } from 'electron';
@ -39,14 +39,14 @@ export class TcpListenerService {
try {
session.send(opcode, message);
} catch (e) {
console.warn("Failed to send error.", e);
Main.logger.warn("Failed to send error.", e);
session.close();
}
});
}
private async handleServerError(err: NodeJS.ErrnoException) {
console.error("Server error:", err);
Main.logger.error("Server error:", err);
const restartPrompt = await dialog.showMessageBox({
type: 'error',
@ -66,14 +66,14 @@ export class TcpListenerService {
}
private handleConnection(socket: net.Socket) {
console.log(`new connection from ${socket.remoteAddress}:${socket.remotePort}`);
Main.logger.info(`new connection from ${socket.remoteAddress}:${socket.remotePort}`);
const session = new FCastSession(socket, (data) => socket.write(data));
session.bindEvents(this.emitter);
this.sessions.push(session);
socket.on("error", (err) => {
console.warn(`Error from ${socket.remoteAddress}:${socket.remotePort}.`, err);
Main.logger.warn(`Error from ${socket.remoteAddress}:${socket.remotePort}.`, err);
socket.destroy();
});
@ -81,7 +81,7 @@ export class TcpListenerService {
try {
session.processBytes(buffer);
} catch (e) {
console.warn(`Error while handling packet from ${socket.remoteAddress}:${socket.remotePort}.`, e);
Main.logger.warn(`Error while handling packet from ${socket.remoteAddress}:${socket.remotePort}.`, e);
socket.end();
}
});
@ -94,10 +94,10 @@ export class TcpListenerService {
});
try {
console.log('Sending version');
Main.logger.info('Sending version');
session.send(Opcode.Version, {version: 2});
} catch (e) {
console.log('Failed to send version');
Main.logger.info('Failed to send version', e);
}
}
}

View file

@ -1,41 +1,50 @@
import * as fs from 'fs';
import * as https from 'https';
import * as path from 'path';
import { URL } from 'url';
import * as crypto from 'crypto';
import * as log4js from "log4js";
import { app } from 'electron';
import { Store } from './Store';
const extract = require('extract-zip');
const logger = log4js.getLogger();
enum UpdateState {
Copy,
Cleanup,
};
interface ReleaseInfo {
previousVersions: [string];
currentVersion: string;
currentReleases: [
string: [ // channel
string: [ // os
string: [ // arch
string: []
]
]
]
];
channelCurrentVersions: [string: number];
allVersions: [string];
}
interface UpdateInfo {
updateState: UpdateState;
installPath: string;
tempPath: string;
currentVersion: string;
}
export class Updater {
private basePath: string;
private baseUrl: string;
private appFiles: string[];
private static appPath: string = app.getAppPath();
private static installPath: string = path.join(Updater.appPath, '../../');
private static updateDataPath: string = path.join(app.getPath('userData'), 'updater');
private static updateMetadataPath = path.join(Updater.updateDataPath, './update.json');
private static baseUrl: string = 'https://dl.fcast.org/electron';
constructor(basePath: string, baseUrl: string) {
this.basePath = basePath;
this.baseUrl = baseUrl;
this.appFiles = [
'dist/main/c.mp4',
'dist/main/index.html',
'dist/main/preload.js',
'dist/main/qrcode.min.js',
'dist/main/renderer.js',
'dist/main/style.css',
'dist/main/video-js.min.css',
'dist/main/video.min.js',
'dist/player/index.html',
'dist/player/preload.js',
'dist/player/renderer.js',
'dist/player/style.css',
'dist/player/video-js.min.css',
'dist/player/video.min.js',
'dist/app.ico',
'dist/app.png',
'dist/bundle.js',
'package.json'
];
}
private async fetchJSON(url: string): Promise<any> {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
private static async fetchJSON(url: string): Promise<any> {
return new Promise((resolve, reject) => {
https.get(url, (res) => {
let data = '';
@ -56,7 +65,7 @@ export class Updater {
});
}
private async downloadFile(url: string, destination: string): Promise<void> {
private static async downloadFile(url: string, destination: string): Promise<void> {
return new Promise((resolve, reject) => {
const file = fs.createWriteStream(destination);
https.get(url, (response) => {
@ -72,37 +81,148 @@ export class Updater {
});
}
private compareVersions(v1: string, v2: string): number {
const v1Parts = v1.split('.').map(Number);
const v2Parts = v2.split('.').map(Number);
private static getDownloadFile(version: string) {
let target: string = process.platform; // linux
for (let i = 0; i < v1Parts.length; i++) {
if (v1Parts[i] > v2Parts[i]) {
return 1;
} else if (v1Parts[i] < v2Parts[i]) {
return -1;
}
if (process.platform === 'win32') {
target = 'windows';
}
else if (process.platform === 'darwin') {
target = 'macOS';
}
return 0;
return `fcast-receiver-${version}-${target}-${process.arch}.zip`;
}
public async update(): Promise<Boolean> {
console.log("Updater invoked", { baseUrl: this.baseUrl, basePath: this.basePath });
public static isUpdating() {
return fs.existsSync(Updater.updateMetadataPath);
}
const localPackage = JSON.parse(fs.readFileSync(path.join(this.basePath, './package.json'), 'utf-8'));
const remotePackage = await this.fetchJSON(`${this.baseUrl}/package.json`.toString());
public static async processUpdate(): Promise<void> {
const updateInfo: UpdateInfo = JSON.parse(fs.readFileSync(Updater.updateMetadataPath, 'utf-8'));
const extractionDir = process.platform === 'darwin' ? 'FCast Receiver.app' : `fcast-receiver-${process.platform}-${process.arch}`;
console.log('Update check', { localVersion: localPackage.version, remoteVersion: remotePackage.version });
if (this.compareVersions(remotePackage.version, localPackage.version) === 1) {
for (const file of this.appFiles) {
const fileUrl = `${this.baseUrl}/${file}`;
const destination = path.join(this.basePath, file);
switch (updateInfo.updateState) {
case UpdateState.Copy: {
const binaryName = process.platform === 'win32' ? 'fcast-receiver.exe' : 'fcast-receiver';
console.log(`Downloading '${fileUrl}' to '${destination}'.`);
await this.downloadFile(fileUrl.toString(), destination);
if (Updater.installPath === updateInfo.installPath) {
logger.info('Update in progress. Restarting application to perform update...')
const updateBinPath = process.platform === 'darwin' ? path.join(updateInfo.tempPath, extractionDir) : path.join(updateInfo.tempPath, extractionDir, binaryName);
log4js.shutdown();
app.relaunch({ execPath: updateBinPath });
app.exit();
}
try {
logger.info('Updater process started...');
const src = path.join(updateInfo.tempPath, extractionDir);
logger.info(`Copying files from update directory ${src} to install directory ${updateInfo.installPath}`);
// Electron runtime sees .asar file as directory and causes errors during copy
process.noAsar = true
fs.cpSync(src, updateInfo.installPath, { recursive: true, force: true });
process.noAsar = false
updateInfo.updateState = UpdateState.Cleanup;
await fs.promises.writeFile(Updater.updateMetadataPath, JSON.stringify(updateInfo));
const installBinPath = path.join(updateInfo.installPath, binaryName);
log4js.shutdown();
app.relaunch({ execPath: installBinPath });
app.exit();
}
catch (err) {
logger.error('Error while applying update...');
logger.error(err);
log4js.shutdown();
app.exit();
}
return;
}
case UpdateState.Cleanup: {
try {
logger.info('Performing update cleanup...')
// Electron runtime sees .asar file as directory and causes errors during copy
process.noAsar = true
fs.rmSync(path.join(Updater.updateDataPath, extractionDir), { recursive: true, force: true });
process.noAsar = false
fs.rmSync(path.join(Updater.updateDataPath, Updater.getDownloadFile(updateInfo.currentVersion)));
fs.rmSync(Updater.updateMetadataPath);
}
catch (err) {
logger.error('Error while performing update cleanup...');
logger.error(err);
}
log4js.shutdown();
app.relaunch();
app.exit();
return;
}
}
}
public static async update(): Promise<boolean> {
logger.info('Updater invoked');
if (!fs.existsSync(Updater.updateDataPath)) {
fs.mkdirSync(Updater.updateDataPath);
}
const localPackage = JSON.parse(fs.readFileSync(path.join(Updater.appPath, './package.json'), 'utf-8'));
const releases = await Updater.fetchJSON(`${Updater.baseUrl}/releases.json`.toString()) as ReleaseInfo;
let updaterSettings = Store.get('updater');
if (updaterSettings === null) {
updaterSettings = {
'channel': localPackage.channel,
}
Store.set('updater', updaterSettings);
}
const localChannelVersion: number = localPackage.channelVersion ? localPackage.channelVersion : 0
const currentChannelVersion: number = releases.channelCurrentVersions[localPackage.channel] ? releases.channelCurrentVersions[localPackage.channel] : 0
logger.info('Update check', { channel: localPackage.channel, localVersion: localPackage.version, currentVersion: releases.currentVersion });
if (localPackage.version !== releases.currentVersion || (localPackage.channel !== 'stable' && localChannelVersion < currentChannelVersion)) {
const channel = localPackage.version !== releases.currentVersion ? 'stable' : localPackage.channel;
const file = Updater.getDownloadFile(releases.currentVersion);
const fileInfo = releases.currentReleases[channel][process.platform][process.arch]
const destination = path.join(Updater.updateDataPath, file);
logger.info(`Downloading '${fileInfo.url}' to '${destination}'.`);
await Updater.downloadFile(fileInfo.url.toString(), destination);
const downloadedFile = await fs.promises.readFile(destination);
const hash = crypto.createHash('sha256').end(downloadedFile).digest('hex');
if (fileInfo.sha256Digest !== hash) {
const message = 'Update failed integrity check. Please try checking for updates again or downloading the update manually.';
logger.error(`Update failed integrity check. Expected hash: ${fileInfo.sha256Digest}, actual hash: ${hash}`);
throw message;
}
// Electron runtime sees .asar file as directory and causes errors during extraction
logger.info('Extracting update...');
process.noAsar = true
await extract(destination, { dir: path.dirname(destination) });
process.noAsar = false
logger.info('Extraction complete.');
const updateInfo: UpdateInfo = {
updateState: UpdateState.Copy,
installPath: Updater.installPath,
tempPath: path.dirname(destination),
currentVersion: releases.currentVersion,
};
await fs.promises.writeFile(Updater.updateMetadataPath, JSON.stringify(updateInfo));
logger.info('Written update metadata.');
return true;
}

View file

@ -38,14 +38,14 @@ export class WebSocketListenerService {
try {
session.send(opcode, message);
} catch (e) {
console.warn("Failed to send error.", e);
Main.logger.warn("Failed to send error.", e);
session.close();
}
});
}
private async handleServerError(err: NodeJS.ErrnoException) {
console.error("Server error:", err);
Main.logger.error("Server error:", err);
const restartPrompt = await dialog.showMessageBox({
type: 'error',
@ -65,14 +65,14 @@ export class WebSocketListenerService {
}
private handleConnection(socket: WebSocket) {
console.log('New WebSocket connection');
Main.logger.info('New WebSocket connection');
const session = new FCastSession(socket, (data) => socket.send(data));
session.bindEvents(this.emitter);
this.sessions.push(session);
socket.on("error", (err) => {
console.warn(`Error.`, err);
Main.logger.warn(`Error.`, err);
session.close();
});
@ -81,16 +81,16 @@ export class WebSocketListenerService {
if (data instanceof Buffer) {
session.processBytes(data);
} else {
console.warn("Received unhandled string message", data);
Main.logger.warn("Received unhandled string message", data);
}
} catch (e) {
console.warn(`Error while handling packet.`, e);
Main.logger.warn(`Error while handling packet.`, e);
session.close();
}
});
socket.on("close", () => {
console.log('WebSocket connection closed');
Main.logger.info('WebSocket connection closed');
const index = this.sessions.indexOf(session);
if (index != -1) {
@ -99,10 +99,10 @@ export class WebSocketListenerService {
});
try {
console.log('Sending version');
Main.logger.info('Sending version');
session.send(Opcode.Version, {version: 2});
} catch (e) {
console.log('Failed to send version');
Main.logger.info('Failed to send version');
}
}
}

View file

@ -1,7 +1,7 @@
{
"compilerOptions": {
"target": "es5",
"module": "commonjs",
"target": "ES2022",
"module": "ES2022",
"moduleResolution": "node",
"sourceMap": false,
"emitDecoratorMetadata": true,