Mirror of https://gitlab.com/futo-org/fcast.git (synced 2025-06-24 21:25:23 +00:00)

Merge branch 'michael/updater' into 'v3'

Reworked application updater

See merge request videostreaming/fcast!5

Commit e5232c1358: 21 changed files with 1377 additions and 368 deletions
@@ -71,6 +71,7 @@ deploy:
before_script:
- cd receivers/electron
- pip install boto3
- pip install requests
script:
- python3 scripts/deploy.py
- python3 scripts/deploy/deploy.py
when: manual
@@ -53,8 +53,8 @@ module.exports = {
},
background: './assets/images/background.png',
contents: [
{ 'x': 190, 'y': 350, 'type': 'file', 'path': `out/${APPLICATION_NAME}-darwin-${argv.arch}/${APPLICATION_TITLE}.app` },
{ 'x': 460, 'y': 350, 'type': 'link', 'path': '/Applications' },
{ 'x': 90, 'y': 350, 'type': 'file', 'path': `out/${APPLICATION_NAME}-darwin-${argv.arch}/${APPLICATION_TITLE}.app` },
{ 'x': 360, 'y': 350, 'type': 'link', 'path': '/Applications' },
{ 'x': 0, 'y': 540, 'type': 'position', 'path': '.background' },
{ 'x': 120, 'y': 540, 'type': 'position', 'path': '.VolumeIcon.icns' }
],

@@ -95,7 +95,16 @@ module.exports = {
},
],
hooks: {
postPackage: async (config, packageResults) => {
readPackageJson: async (forgeConfig, packageJson) => {
packageJson.channel = process.env.FCAST_CHANNEL ? process.env.FCAST_CHANNEL : 'stable';
if (packageJson.channel !== 'stable') {
packageJson.channelVersion = process.env.FCAST_CHANNEL_VERSION ? process.env.FCAST_CHANNEL_VERSION : '1';
}

return packageJson;
},

postPackage: async (config, packageResults) => {
switch (packageResults.platform) {
case "darwin": {
let artifactName = `${APPLICATION_NAME}.app`;

@@ -108,6 +117,7 @@ module.exports = {
break;
}
},

postMake: async (forgeConfig, makeResults) => {
for (const e of makeResults) {
// Standardize artifact output naming

@@ -115,12 +125,12 @@ module.exports = {
case "win32": {
let artifactName = `${APPLICATION_NAME}-win32-${e.arch}-${e.packageJSON.version}.zip`;
if (fs.existsSync(`./out/make/zip/win32/${e.arch}/${artifactName}`)) {
fs.renameSync(`./out/make/zip/win32/${e.arch}/${artifactName}`, `./out/make/zip/win32/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-windows-${e.arch}.zip`);
fs.renameSync(`./out/make/zip/win32/${e.arch}/${artifactName}`, path.join(`./out/make/zip/win32/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'zip')));
}

artifactName = `${APPLICATION_NAME}.msi`;
if (fs.existsSync(`./out/make/wix/${e.arch}/${artifactName}`)) {
fs.renameSync(`./out/make/wix/${e.arch}/${artifactName}`, `./out/make/wix/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-windows-${e.arch}-setup.msi`);
fs.renameSync(`./out/make/wix/${e.arch}/${artifactName}`, path.join(`./out/make/wix/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'msi')));
}

break;

@@ -129,12 +139,11 @@ module.exports = {
let artifactName = `${APPLICATION_TITLE}.dmg`;
if (fs.existsSync(`./out/make/${artifactName}`)) {
fs.mkdirSync(`./out/make/dmg/${e.arch}`, { recursive: true });
fs.renameSync(`./out/make/${artifactName}`, `./out/make/dmg/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-macOS-${e.arch}.dmg`);
fs.renameSync(`./out/make/${artifactName}`, path.join(`./out/make/dmg/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'dmg')));
}

console.log(`Making a zip distributable for ${e.platform}/${e.arch}`);
const zipName = `${APPLICATION_NAME}-${e.packageJSON.version}-macOS-${e.arch}.zip`;
const zipPath = path.resolve(process.cwd(), 'out', 'make', 'zip', e.platform, e.arch, zipName);
const zipPath = path.resolve(process.cwd(), 'out', 'make', 'zip', e.platform, e.arch, generateArtifactName(e.packageJSON, e.platform, e.arch, 'zip'));

exec(`mkdir -p ${path.dirname(zipPath)}`, execOutput);
exec(`cd out/${APPLICATION_NAME}-${e.platform}-${e.arch}; zip -r -y "${zipPath}" "${APPLICATION_TITLE}.app"`, execOutput);

@@ -149,27 +158,27 @@ const zipName = `${APPLICATION_NAME}-${e.packageJSON.version}-macOS-${e.arch}.zi
// await extract(`./out/make/zip/linux/${e.arch}/${artifactName}`, { dir: `${process.cwd()}/out/make/zip/linux/${e.arch}/` });
// fs.chownSync(`${process.cwd()}/out/make/zip/linux/${e.arch}/${APPLICATION_NAME}-linux-${e.arch}/chrome-sandbox`, 0, 0);
// fs.chmodSync(`${process.cwd()}/out/make/zip/linux/${e.arch}/${APPLICATION_NAME}-linux-${e.arch}/chrome-sandbox`, 4755);
fs.renameSync(`./out/make/zip/linux/${e.arch}/${artifactName}`, `./out/make/zip/linux/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-linux-${e.arch}.zip`);
fs.renameSync(`./out/make/zip/linux/${e.arch}/${artifactName}`, path.join(`./out/make/zip/linux/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'zip')));
}

artifactName = `${APPLICATION_NAME}_${e.packageJSON.version}_amd64.deb`
if (fs.existsSync(`./out/make/deb/${e.arch}/${artifactName}`)) {
fs.renameSync(`./out/make/deb/${e.arch}/${artifactName}`, `./out/make/deb/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-linux-${e.arch}.deb`);
fs.renameSync(`./out/make/deb/${e.arch}/${artifactName}`, path.join(`./out/make/deb/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'deb')));
}

artifactName = `${APPLICATION_NAME}_${e.packageJSON.version}_arm64.deb`
if (fs.existsSync(`./out/make/deb/${e.arch}/${artifactName}`)) {
fs.renameSync(`./out/make/deb/${e.arch}/${artifactName}`, `./out/make/deb/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-linux-${e.arch}.deb`);
fs.renameSync(`./out/make/deb/${e.arch}/${artifactName}`, path.join(`./out/make/deb/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'deb')));
}

artifactName = `${APPLICATION_NAME}-${e.packageJSON.version}-1.x86_64.rpm`
if (fs.existsSync(`./out/make/rpm/${e.arch}/${artifactName}`)) {
fs.renameSync(`./out/make/rpm/${e.arch}/${artifactName}`, `./out/make/rpm/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-linux-${e.arch}.rpm`);
fs.renameSync(`./out/make/rpm/${e.arch}/${artifactName}`, path.join(`./out/make/rpm/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'rpm')));
}

artifactName = `${APPLICATION_NAME}-${e.packageJSON.version}-1.arm64.rpm`
if (fs.existsSync(`./out/make/rpm/${e.arch}/${artifactName}`)) {
fs.renameSync(`./out/make/rpm/${e.arch}/${artifactName}`, `./out/make/rpm/${e.arch}/${APPLICATION_NAME}-${e.packageJSON.version}-linux-${e.arch}.rpm`);
fs.renameSync(`./out/make/rpm/${e.arch}/${artifactName}`, path.join(`./out/make/rpm/${e.arch}`, generateArtifactName(e.packageJSON, e.platform, e.arch, 'rpm')));
}

break;

@@ -193,7 +202,7 @@ const zipName = `${APPLICATION_NAME}-${e.packageJSON.version}-macOS-${e.arch}.zi
[FuseV1Options.EnableCookieEncryption]: true,
[FuseV1Options.EnableNodeOptionsEnvironmentVariable]: false,
[FuseV1Options.EnableNodeCliInspectArguments]: false,
[FuseV1Options.EnableEmbeddedAsarIntegrityValidation]: true,
[FuseV1Options.EnableEmbeddedAsarIntegrityValidation]: false,
[FuseV1Options.OnlyLoadAppFromAsar]: true,
}),
],

@@ -205,3 +214,26 @@ function execOutput(err, stdout, stderr) {
}
console.log(stdout);
}

function getArtifactOS(platform) {
switch (platform) {
case 'win32':
return 'windows';
case 'darwin':
return 'macOS';
default:
return platform;
}
}

function generateArtifactName(packageJSON, platform, arch, extension) {
let artifactName = `${APPLICATION_NAME}-${packageJSON.version}-${getArtifactOS(platform)}-${arch}`;
if (extension === 'msi') {
artifactName += '-setup';
}
if (packageJSON.channel !== 'stable') {
artifactName += `-${packageJSON.channel}-${packageJSON.channelVersion}`;
}
artifactName += `.${extension}`
return artifactName;
}
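As a rough illustration of the naming scheme generateArtifactName implements (assuming APPLICATION_NAME resolves to 'fcast-receiver', as the package metadata suggests), an equivalent Python sketch, not part of this diff, would be:

def artifact_name(name, version, platform, arch, extension, channel='stable', channel_version=None):
    # Mirror getArtifactOS(): win32 -> windows, darwin -> macOS, anything else unchanged.
    os_name = {'win32': 'windows', 'darwin': 'macOS'}.get(platform, platform)
    result = f'{name}-{version}-{os_name}-{arch}'
    if extension == 'msi':
        result += '-setup'                          # installers get a "-setup" suffix
    if channel != 'stable':
        result += f'-{channel}-{channel_version}'   # e.g. "-beta-2" for channel builds
    return f'{result}.{extension}'

# artifact_name('fcast-receiver', '2.0.0', 'win32', 'x64', 'msi')
#   -> 'fcast-receiver-2.0.0-windows-x64-setup.msi'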
182 receivers/electron/package-lock.json (generated)
|
@ -1,20 +1,23 @@
|
|||
{
|
||||
"name": "fcast-receiver",
|
||||
"version": "1.9.0",
|
||||
"version": "2.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "fcast-receiver",
|
||||
"version": "1.9.0",
|
||||
"version": "2.0.0",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@vscode/sudo-prompt": "^9.3.1",
|
||||
"bufferutil": "^4.0.8",
|
||||
"dashjs": "^4.7.4",
|
||||
"electron-json-storage": "^4.6.0",
|
||||
"extract-zip": "^2.0.1",
|
||||
"hls.js": "^1.5.15",
|
||||
"http": "^0.0.1-security",
|
||||
"https": "^1.0.0",
|
||||
"log4js": "^6.9.1",
|
||||
"qrcode": "^1.5.3",
|
||||
"update-electron-app": "^3.0.0",
|
||||
"url": "^0.11.3",
|
||||
|
@ -34,6 +37,7 @@
|
|||
"@electron/fuses": "^1.8.0",
|
||||
"@eslint/js": "^9.10.0",
|
||||
"@futo/forge-maker-wix-linux": "^7.5.0",
|
||||
"@types/electron-json-storage": "^4.5.4",
|
||||
"@types/jest": "^29.5.11",
|
||||
"@types/mdns": "^0.0.38",
|
||||
"@types/node-forge": "^1.3.10",
|
||||
|
@ -2613,6 +2617,13 @@
|
|||
"@types/responselike": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/electron-json-storage": {
|
||||
"version": "4.5.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/electron-json-storage/-/electron-json-storage-4.5.4.tgz",
|
||||
"integrity": "sha512-M0ZzFYFOFejheqF900ux/CyzKC6Gex28rfeg22+MKxR9E82wU37bWrgboe8KHD63igpORmVjjmsZo6qDfkxdQQ==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/estree": {
|
||||
"version": "1.0.6",
|
||||
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
|
||||
|
@ -3054,6 +3065,12 @@
|
|||
"url": "https://opencollective.com/eslint"
|
||||
}
|
||||
},
|
||||
"node_modules/@vscode/sudo-prompt": {
|
||||
"version": "9.3.1",
|
||||
"resolved": "https://registry.npmjs.org/@vscode/sudo-prompt/-/sudo-prompt-9.3.1.tgz",
|
||||
"integrity": "sha512-9ORTwwS74VaTn38tNbQhsA5U44zkJfcb0BdTSyyG6frP4e8KMtHuTXYmwefe5dpL8XB1aGSIVTaLjD3BbWb5iA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@webassemblyjs/ast": {
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.12.1.tgz",
|
||||
|
@ -3677,7 +3694,6 @@
|
|||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
|
||||
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/base32-encode": {
|
||||
|
@ -3791,7 +3807,6 @@
|
|||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0",
|
||||
|
@ -4413,7 +4428,6 @@
|
|||
"version": "0.0.1",
|
||||
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
||||
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/convert-source-map": {
|
||||
|
@ -4453,9 +4467,9 @@
|
|||
"license": "MIT"
|
||||
},
|
||||
"node_modules/cross-spawn": {
|
||||
"version": "7.0.3",
|
||||
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
|
||||
"integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
|
||||
"version": "7.0.5",
|
||||
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.5.tgz",
|
||||
"integrity": "sha512-ZVJrKKYunU38/76t0RMOulHOnUcbU9GbpWKAOZ0mhjr7CX6FVrH+4FrAapSOekrgFQ3f/8gwMEuIft0aKq6Hug==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
|
@ -4563,6 +4577,15 @@
|
|||
"integrity": "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/date-format": {
|
||||
"version": "4.0.14",
|
||||
"resolved": "https://registry.npmjs.org/date-format/-/date-format-4.0.14.tgz",
|
||||
"integrity": "sha512-39BOQLs9ZjKh0/patS9nrT8wc3ioX3/eA/zgbKNopnF2wCqJEoxywwwElATYvRsXdnOxA/OQeQoFZ3rFjVajhg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/debug": {
|
||||
"version": "4.3.7",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz",
|
||||
|
@ -5274,6 +5297,65 @@
|
|||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/electron-json-storage": {
|
||||
"version": "4.6.0",
|
||||
"resolved": "https://registry.npmjs.org/electron-json-storage/-/electron-json-storage-4.6.0.tgz",
|
||||
"integrity": "sha512-gAgNsnA7tEtV9LzzOnZTyVIb3cQtCva+bEBVT5pbRGU8ZSZTVKPBrTxIAYjeVfdSjyNXgfb1mr/CZrOJgeHyqg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"async": "^2.0.0",
|
||||
"lockfile": "^1.0.4",
|
||||
"lodash": "^4.0.1",
|
||||
"mkdirp": "^0.5.1",
|
||||
"rimraf": "^2.5.1",
|
||||
"write-file-atomic": "^2.4.2"
|
||||
}
|
||||
},
|
||||
"node_modules/electron-json-storage/node_modules/async": {
|
||||
"version": "2.6.4",
|
||||
"resolved": "https://registry.npmjs.org/async/-/async-2.6.4.tgz",
|
||||
"integrity": "sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"lodash": "^4.17.14"
|
||||
}
|
||||
},
|
||||
"node_modules/electron-json-storage/node_modules/mkdirp": {
|
||||
"version": "0.5.6",
|
||||
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz",
|
||||
"integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"minimist": "^1.2.6"
|
||||
},
|
||||
"bin": {
|
||||
"mkdirp": "bin/cmd.js"
|
||||
}
|
||||
},
|
||||
"node_modules/electron-json-storage/node_modules/rimraf": {
|
||||
"version": "2.7.1",
|
||||
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
|
||||
"integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
|
||||
"deprecated": "Rimraf versions prior to v4 are no longer supported",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"glob": "^7.1.3"
|
||||
},
|
||||
"bin": {
|
||||
"rimraf": "bin.js"
|
||||
}
|
||||
},
|
||||
"node_modules/electron-json-storage/node_modules/write-file-atomic": {
|
||||
"version": "2.4.3",
|
||||
"resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz",
|
||||
"integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"graceful-fs": "^4.1.11",
|
||||
"imurmurhash": "^0.1.4",
|
||||
"signal-exit": "^3.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/electron-to-chromium": {
|
||||
"version": "1.5.41",
|
||||
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.41.tgz",
|
||||
|
@ -6142,7 +6224,6 @@
|
|||
"version": "3.3.1",
|
||||
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz",
|
||||
"integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==",
|
||||
"dev": true,
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/flora-colossus": {
|
||||
|
@ -6243,7 +6324,6 @@
|
|||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
|
||||
"integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
|
||||
"dev": true,
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/fsevents": {
|
||||
|
@ -6464,7 +6544,6 @@
|
|||
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
|
||||
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
|
||||
"deprecated": "Glob versions prior to v9 are no longer supported",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"fs.realpath": "^1.0.0",
|
||||
|
@ -6638,7 +6717,6 @@
|
|||
"version": "4.2.11",
|
||||
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
|
||||
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==",
|
||||
"dev": true,
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/graphemer": {
|
||||
|
@ -6952,7 +7030,6 @@
|
|||
"version": "0.1.4",
|
||||
"resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
|
||||
"integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.8.19"
|
||||
|
@ -6980,7 +7057,6 @@
|
|||
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
|
||||
"integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
|
||||
"deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"once": "^1.3.0",
|
||||
|
@ -6991,7 +7067,6 @@
|
|||
"version": "2.0.4",
|
||||
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
|
||||
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
|
||||
"dev": true,
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/ini": {
|
||||
|
@ -8344,11 +8419,19 @@
|
|||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/lockfile": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/lockfile/-/lockfile-1.0.4.tgz",
|
||||
"integrity": "sha512-cvbTwETRfsFh4nHsL1eGWapU1XFi5Ot9E85sWAwia7Y7EgB7vfqcZhTKZ+l7hCGxSPoushMv5GKhT5PdLv03WA==",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"signal-exit": "^3.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/lodash": {
|
||||
"version": "4.17.21",
|
||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
|
||||
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/lodash.get": {
|
||||
|
@ -8467,6 +8550,22 @@
|
|||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/log4js": {
|
||||
"version": "6.9.1",
|
||||
"resolved": "https://registry.npmjs.org/log4js/-/log4js-6.9.1.tgz",
|
||||
"integrity": "sha512-1somDdy9sChrr9/f4UlzhdaGfDR2c/SaD2a4T7qEkG4jTS57/B3qmnjLYePwQ8cqWnUHZI0iAKxMBpCZICiZ2g==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"date-format": "^4.0.14",
|
||||
"debug": "^4.3.4",
|
||||
"flatted": "^3.2.7",
|
||||
"rfdc": "^1.3.0",
|
||||
"streamroller": "^3.1.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/lowercase-keys": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz",
|
||||
|
@ -8734,7 +8833,6 @@
|
|||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
|
||||
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"brace-expansion": "^1.1.7"
|
||||
|
@ -8747,7 +8845,6 @@
|
|||
"version": "1.2.8",
|
||||
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
|
||||
"integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
|
@ -9395,7 +9492,6 @@
|
|||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
|
||||
"integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
|
@ -10351,7 +10447,6 @@
|
|||
"version": "1.4.1",
|
||||
"resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz",
|
||||
"integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/rimraf": {
|
||||
|
@ -10620,7 +10715,6 @@
|
|||
"version": "3.0.7",
|
||||
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
|
||||
"integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
|
||||
"dev": true,
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/sisteransi": {
|
||||
|
@ -10822,6 +10916,52 @@
|
|||
"node": ">= 0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/streamroller": {
|
||||
"version": "3.1.5",
|
||||
"resolved": "https://registry.npmjs.org/streamroller/-/streamroller-3.1.5.tgz",
|
||||
"integrity": "sha512-KFxaM7XT+irxvdqSP1LGLgNWbYN7ay5owZ3r/8t77p+EtSUAfUgtl7be3xtqtOmGUl9K9YPO2ca8133RlTjvKw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"date-format": "^4.0.14",
|
||||
"debug": "^4.3.4",
|
||||
"fs-extra": "^8.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/streamroller/node_modules/fs-extra": {
|
||||
"version": "8.1.0",
|
||||
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz",
|
||||
"integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"graceful-fs": "^4.2.0",
|
||||
"jsonfile": "^4.0.0",
|
||||
"universalify": "^0.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6 <7 || >=8"
|
||||
}
|
||||
},
|
||||
"node_modules/streamroller/node_modules/jsonfile": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
|
||||
"integrity": "sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==",
|
||||
"license": "MIT",
|
||||
"optionalDependencies": {
|
||||
"graceful-fs": "^4.1.6"
|
||||
}
|
||||
},
|
||||
"node_modules/streamroller/node_modules/universalify": {
|
||||
"version": "0.1.2",
|
||||
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
|
||||
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/string_decoder": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
||||
|
|
|
@@ -24,6 +24,7 @@
"@electron/fuses": "^1.8.0",
"@eslint/js": "^9.10.0",
"@futo/forge-maker-wix-linux": "^7.5.0",
"@types/electron-json-storage": "^4.5.4",
"@types/jest": "^29.5.11",
"@types/mdns": "^0.0.38",
"@types/node-forge": "^1.3.10",

@@ -44,12 +45,15 @@
"webpack-cli": "^5.0.1"
},
"dependencies": {
"@vscode/sudo-prompt": "^9.3.1",
"bufferutil": "^4.0.8",
"dashjs": "^4.7.4",
"electron-json-storage": "^4.6.0",
"extract-zip": "^2.0.1",
"hls.js": "^1.5.15",
"http": "^0.0.1-security",
"https": "^1.0.0",
"log4js": "^6.9.1",
"qrcode": "^1.5.3",
"update-electron-app": "^3.0.0",
"url": "^0.11.3",
|
|
@ -1,150 +0,0 @@
|
|||
import os
|
||||
import hashlib
|
||||
import boto3
|
||||
from botocore.client import Config
|
||||
import shutil
|
||||
from functools import cmp_to_key
|
||||
|
||||
ACCOUNT_ID = os.environ.get('R2_ACCOUNT_ID')
|
||||
ACCESS_KEY_ID = os.environ.get('R2_ACCESS_KEY_ID')
|
||||
SECRET_ACCESS_KEY = os.environ.get('R2_SECRET_ACCESS_KEY')
|
||||
BUCKET_NAME = os.environ.get('R2_BUCKET_NAME')
|
||||
|
||||
DEPLOY_DIR = os.environ.get('FCAST_DO_RUNNER_DEPLOY_DIR')
|
||||
TEMP_DIR = os.path.join(DEPLOY_DIR, 'temp')
|
||||
LOCAL_CACHE_DIR = os.path.join(DEPLOY_DIR, 'cache')
|
||||
|
||||
# Customizable CI parameters
|
||||
CACHE_VERSION_AMOUNT = int(os.environ.get('CACHE_VERSION_AMOUNT', default="-1"))
|
||||
RELEASE_CANDIDATE = bool(os.environ.get('RELEASE_CANDIDATE', default=False))
|
||||
RELEASE_CANDIDATE_VERSION = int(os.environ.get('RELEASE_CANDIDATE_VERSION', default="1"))
|
||||
|
||||
# Utility functions
|
||||
def compare_versions(x, y):
|
||||
x_parts = x.split('.')
|
||||
y_parts = y.split('.')
|
||||
|
||||
for i in range(len(x_parts)):
|
||||
if x_parts[i] < y_parts[i]:
|
||||
return -1
|
||||
elif x_parts[i] > y_parts[i]:
|
||||
return 1
|
||||
|
||||
return 0
|
||||
|
||||
# Initial setup
|
||||
|
||||
# Note: Cloudflare R2 docs outdated, secret is not supposed to be hashed...
|
||||
|
||||
# Hash the secret access key using SHA-256
|
||||
#hashed_secret_key = hashlib.sha256(SECRET_ACCESS_KEY.encode()).hexdigest()
|
||||
|
||||
# Configure the S3 client for Cloudflare R2
|
||||
s3 = boto3.client('s3',
|
||||
endpoint_url=f'https://{ACCOUNT_ID}.r2.cloudflarestorage.com',
|
||||
aws_access_key_id=ACCESS_KEY_ID,
|
||||
# aws_secret_access_key=hashed_secret_key,
|
||||
aws_secret_access_key=SECRET_ACCESS_KEY,
|
||||
config=Config(
|
||||
signature_version='s3v4'
|
||||
)
|
||||
)
|
||||
list_response = s3.list_objects_v2(Bucket=BUCKET_NAME, Prefix='electron/')
|
||||
bucket_files = list_response.get('Contents', [])
|
||||
bucket_versions_full = sorted(set(map(lambda x: x['Key'].split('/')[1], bucket_files)), key=cmp_to_key(compare_versions), reverse=True)
|
||||
bucket_versions = bucket_versions_full if CACHE_VERSION_AMOUNT < 0 else bucket_versions_full[:CACHE_VERSION_AMOUNT]
|
||||
os.makedirs(TEMP_DIR, exist_ok=True)
|
||||
|
||||
# CI functions
|
||||
|
||||
def copy_artifacts_to_local_cache():
|
||||
if len(os.listdir('/artifacts')) == 0:
|
||||
print('No artifacts were built...')
|
||||
return None
|
||||
|
||||
print('Copying artifacts to cache...')
|
||||
# All artifact should have same version in format: /artifacts/PKG/OS/ARCH/fcast-receiver-VERSION-OS-ARCH.PKG
|
||||
version = os.listdir('/artifacts/zip/linux/x64')[0].split('-')[2]
|
||||
dst = os.path.join(TEMP_DIR, version)
|
||||
print(f'Current app version: {version}')
|
||||
|
||||
shutil.copytree('/artifacts', dst, dirs_exist_ok=True, ignore=shutil.ignore_patterns('*.w*'))
|
||||
for dir in os.listdir('/artifacts'):
|
||||
shutil.rmtree(os.path.join('/artifacts', dir))
|
||||
|
||||
return version
|
||||
|
||||
def sync_local_cache():
|
||||
print('Syncing local cache with s3...')
|
||||
local_files = []
|
||||
for root, _, files in os.walk(LOCAL_CACHE_DIR):
|
||||
for filename in files:
|
||||
rel_path = os.path.relpath(os.path.join(root, filename), LOCAL_CACHE_DIR)
|
||||
version = os.path.relpath(rel_path, 'electron/').split('/')[0]
|
||||
|
||||
if version in bucket_versions:
|
||||
local_files.append(rel_path)
|
||||
else:
|
||||
print(f'Purging file from local cache: {rel_path}')
|
||||
os.remove(os.path.join(root, filename))
|
||||
|
||||
for obj in bucket_files:
|
||||
filename = obj['Key']
|
||||
save_path = os.path.join(LOCAL_CACHE_DIR, filename)
|
||||
|
||||
if filename not in local_files:
|
||||
print(f'Downloading file: {filename}')
|
||||
get_response = s3.get_object(Bucket=BUCKET_NAME, Key=filename)
|
||||
|
||||
os.makedirs(os.path.dirname(save_path), exist_ok=True)
|
||||
with open(save_path, 'wb') as file:
|
||||
file.write(get_response['Body'].read())
|
||||
|
||||
def upload_local_cache(current_version):
|
||||
print('Uploading local cache to s3...')
|
||||
shutil.copytree(TEMP_DIR, os.path.join(LOCAL_CACHE_DIR, 'electron'), dirs_exist_ok=True)
|
||||
|
||||
local_files = []
|
||||
for root, _, files in os.walk(LOCAL_CACHE_DIR):
|
||||
for filename in files:
|
||||
full_path = os.path.join(root, filename)
|
||||
rel_path = os.path.relpath(full_path, LOCAL_CACHE_DIR)
|
||||
version = rel_path.split('/')[1]
|
||||
|
||||
if RELEASE_CANDIDATE and version == current_version:
|
||||
rc_path = full_path[:full_path.rfind('.')] + f'-rc{RELEASE_CANDIDATE_VERSION}' + full_path[full_path.rfind('.'):]
|
||||
os.rename(full_path, rc_path)
|
||||
rel_path = os.path.relpath(rc_path, LOCAL_CACHE_DIR)
|
||||
|
||||
local_files.append(rel_path)
|
||||
|
||||
for file_path in local_files:
|
||||
if file_path not in map(lambda x: x['Key'], bucket_files):
|
||||
print(f'Uploading file: {file_path}')
|
||||
|
||||
with open(os.path.join(LOCAL_CACHE_DIR, file_path), 'rb') as file:
|
||||
put_response = s3.put_object(
|
||||
Body=file,
|
||||
Bucket=BUCKET_NAME,
|
||||
Key=file_path,
|
||||
)
|
||||
|
||||
def generate_delta_updates(current_version):
|
||||
pass
|
||||
|
||||
# generate html previous version browsing (based off of bucket + and local if does not have all files)
|
||||
def generate_previous_releases_page():
|
||||
pass
|
||||
|
||||
def update_website():
|
||||
pass
|
||||
|
||||
# CI Operations
|
||||
current_version = copy_artifacts_to_local_cache()
|
||||
sync_local_cache()
|
||||
# generate_delta_updates(current_version)
|
||||
upload_local_cache(current_version)
|
||||
# generate_previous_releases_page()
|
||||
# update_website()
|
||||
|
||||
shutil.rmtree(TEMP_DIR)
|
1 receivers/electron/scripts/deploy/__init__.py (new file)

@@ -0,0 +1 @@
from .util import *
254 receivers/electron/scripts/deploy/deploy.py (new file)
|
@ -0,0 +1,254 @@
|
|||
import os
|
||||
import hashlib
|
||||
import json
|
||||
import shutil
|
||||
from functools import cmp_to_key
|
||||
from util import BUCKET_NAME, S3Client, PackageFormat, ArtifactVersion, compare_versions, generate_update_tarball
|
||||
|
||||
DEPLOY_DIR = os.environ.get('FCAST_DO_RUNNER_DEPLOY_DIR')
|
||||
TEMP_DIR = os.path.join(DEPLOY_DIR, 'temp')
|
||||
LOCAL_CACHE_DIR = os.path.join(DEPLOY_DIR, 'cache')
|
||||
BASE_DOWNLOAD_URL = BUCKET_NAME.replace('-', '.')
|
||||
EXCLUDED_DELTA_VERSIONS = ["1.0.14"]
|
||||
|
||||
# Version tracking for migration support
|
||||
RELEASES_JSON_FILE_VERSION = 1
|
||||
RELEASES_JSON_MAJOR_VERSION = '1'
|
||||
RELEASES_JSON = f'releases_v{RELEASES_JSON_MAJOR_VERSION}.json'
|
||||
|
||||
# Customizable CI parameters
|
||||
CACHE_VERSION_AMOUNT = int(os.environ.get('CACHE_VERSION_AMOUNT', default="-1"))
|
||||
|
||||
s3 = S3Client(CACHE_VERSION_AMOUNT, EXCLUDED_DELTA_VERSIONS)
|
||||
|
||||
# CI functions
|
||||
def ensure_files_exist(dirs, files):
|
||||
for d in dirs:
|
||||
os.makedirs(d, exist_ok=True)
|
||||
|
||||
for f in files:
|
||||
if not os.path.exists(os.path.join(LOCAL_CACHE_DIR, f)):
|
||||
s3.download_file(os.path.join(LOCAL_CACHE_DIR, f), f)
|
||||
|
||||
def copy_artifacts_to_local_cache():
|
||||
version = None
|
||||
with open(os.path.join(LOCAL_CACHE_DIR, 'electron', RELEASES_JSON) , 'r') as file:
|
||||
releases = json.load(file)
|
||||
version = ArtifactVersion(releases['currentVersion'], 'stable', None)
|
||||
|
||||
if len(os.listdir('/artifacts')) == 0:
|
||||
print('No artifacts were built...')
|
||||
return version
|
||||
|
||||
print('Copying artifacts to cache...')
|
||||
# Picking a random package that exists from the build pipeline
|
||||
artifact = PackageFormat(os.listdir('/artifacts/zip/linux/x64')[0])
|
||||
version = ArtifactVersion(artifact.version, artifact.channel, artifact.channel_version)
|
||||
dst = os.path.join(TEMP_DIR, version.version)
|
||||
|
||||
shutil.copytree('/artifacts', dst, dirs_exist_ok=True, ignore=shutil.ignore_patterns('*.w*'))
|
||||
for dir in os.listdir('/artifacts'):
|
||||
shutil.rmtree(os.path.join('/artifacts', dir))
|
||||
|
||||
print(f'Current app version: {version}')
|
||||
return version
|
||||
|
||||
def sync_local_cache():
|
||||
print('Syncing local cache with s3...')
|
||||
local_files = []
|
||||
for root, _, files in os.walk(LOCAL_CACHE_DIR):
|
||||
for filename in files:
|
||||
rel_path = os.path.relpath(os.path.join(root, filename), LOCAL_CACHE_DIR)
|
||||
version = os.path.relpath(rel_path, 'electron/').split('/')[0]
|
||||
|
||||
if version in s3.get_versions() or filename == RELEASES_JSON:
|
||||
local_files.append(rel_path)
|
||||
elif filename != RELEASES_JSON:
|
||||
print(f'Purging file from local cache: {rel_path}')
|
||||
os.remove(os.path.join(root, filename))
|
||||
|
||||
for obj in s3.get_bucket_files():
|
||||
filename = obj['Key']
|
||||
path = os.path.join(LOCAL_CACHE_DIR, filename)
|
||||
|
||||
if filename not in local_files:
|
||||
s3.download_file(path, filename)
|
||||
|
||||
def upload_local_cache():
|
||||
print('Uploading local cache to s3...')
|
||||
shutil.copytree(TEMP_DIR, os.path.join(LOCAL_CACHE_DIR, 'electron'), dirs_exist_ok=True)
|
||||
|
||||
local_files = []
|
||||
for root, _, files in os.walk(LOCAL_CACHE_DIR):
|
||||
for filename in files:
|
||||
full_path = os.path.join(root, filename)
|
||||
rel_path = os.path.relpath(full_path, LOCAL_CACHE_DIR)
|
||||
local_files.append(rel_path)
|
||||
|
||||
for file_path in local_files:
|
||||
if file_path not in map(lambda x: x['Key'], s3.get_bucket_files()) or os.path.basename(file_path) == RELEASES_JSON:
|
||||
s3.upload_file(os.path.join(LOCAL_CACHE_DIR, file_path), file_path)
|
||||
|
||||
# TODO: WIP
|
||||
def generate_delta_updates(artifact_version):
|
||||
delta_info = {}
|
||||
|
||||
releases = None
|
||||
with open(os.path.join(LOCAL_CACHE_DIR, 'electron', RELEASES_JSON) , 'r') as file:
|
||||
releases = json.load(file)
|
||||
|
||||
# Get sha digest from base version for integrity validation
|
||||
print('Generating sha digests from previous updates...')
|
||||
for root, _, files in os.walk(LOCAL_CACHE_DIR):
|
||||
for filename in filter(lambda f: f.endswith('.zip'), files):
|
||||
full_path = os.path.join(root, filename)
|
||||
rel_path = os.path.relpath(full_path, os.path.join(LOCAL_CACHE_DIR, 'electron'))
|
||||
package = PackageFormat(rel_path)
|
||||
|
||||
if package.channel != artifact_version.channel or package.version in EXCLUDED_DELTA_VERSIONS:
|
||||
continue
|
||||
|
||||
print(f'Generating sha digests from: {full_path}')
|
||||
artifact_name, digest = generate_update_tarball(full_path, rel_path, TEMP_DIR, package)
|
||||
print(f'Digest Info: {artifact_name} {digest}')
|
||||
|
||||
os_dict = delta_info.get(package.channel, {})
|
||||
arch_dict = os_dict.get(package.os, {})
|
||||
version_dict = arch_dict.get(package.arch, {})
|
||||
|
||||
delta_entry = {
|
||||
'path': os.path.join(TEMP_DIR, os.path.dirname(rel_path), artifact_name),
|
||||
'digest': digest,
|
||||
}
|
||||
|
||||
version_dict[package.version] = delta_entry
|
||||
arch_dict[package.arch] = version_dict
|
||||
os_dict[package.os] = arch_dict
|
||||
delta_info[package.channel] = os_dict
|
||||
|
||||
|
||||
# TODO: Add limit on amount of delta patches to create (either fixed number or by memory savings)
|
||||
# TODO: Parallelize bsdiff invocation since its single-threaded, provided enough RAM available
|
||||
print('Generating delta updates...')
|
||||
previous_versions = filter(lambda v: v not in EXCLUDED_DELTA_VERSIONS, releases['previousVersions'])
|
||||
for delta_version in previous_versions:
|
||||
# Create delta patches
|
||||
for root, _, files in os.walk(TEMP_DIR):
|
||||
for filename in filter(lambda f: f.endswith('.zip'), files):
|
||||
full_path = os.path.join(root, filename)
|
||||
rel_path = os.path.relpath(full_path, TEMP_DIR)
|
||||
package = PackageFormat(rel_path)
|
||||
|
||||
if package.version in EXCLUDED_DELTA_VERSIONS:
|
||||
continue
|
||||
|
||||
artifact_name, digest = generate_update_tarball(full_path, rel_path, TEMP_DIR, package)
|
||||
base_file = delta_info[package.channel][package.os][package.arch][delta_version]['path']
|
||||
new_file = os.path.join(os.path.dirname(full_path), artifact_name)
|
||||
delta_file = os.path.join(os.path.dirname(full_path), f'{package.name}-{package.version}-{package.os_pretty}-{package.arch}-delta-{delta_version}.delta')
|
||||
command = f'bsdiff {base_file} {new_file} {delta_file}'
|
||||
|
||||
print(f'temp skipping delta generation: {command}')
|
||||
# print(f'Generating delta update: {command}')
|
||||
# os.system(command)
|
||||
# os.remove(base_file)
|
||||
# os.remove(new_file)
|
||||
|
||||
return delta_info
|
||||
|
||||
def generate_releases_json(artifact_version, delta_info):
|
||||
print(f'Generating {RELEASES_JSON}...')
|
||||
releases = None
|
||||
with open(os.path.join(LOCAL_CACHE_DIR, 'electron', RELEASES_JSON) , 'r') as file:
|
||||
releases = json.load(file)
|
||||
|
||||
current_version = releases.get('currentVersion', '0.0.0')
|
||||
current_releases = releases.get('currentReleases', {})
|
||||
channel_current_versions = releases.get('channelCurrentVersions', {})
|
||||
|
||||
all_versions = releases.get('allVersions', [])
|
||||
if current_version not in all_versions:
|
||||
all_versions.append(current_version)
|
||||
|
||||
for root, _, files in os.walk(TEMP_DIR):
|
||||
# Only offer zip and delta updates. Other packages will update from zip packages
|
||||
for filename in filter(lambda f: f.endswith('.zip') or f.endswith('.delta'), files):
|
||||
full_path = os.path.join(root, filename)
|
||||
rel_path = os.path.relpath(full_path, TEMP_DIR)
|
||||
package = PackageFormat(rel_path)
|
||||
url = f'https://{BASE_DOWNLOAD_URL}/electron/{rel_path}'
|
||||
|
||||
digest = ''
|
||||
with open(full_path, 'rb') as file:
|
||||
digest = hashlib.sha256(file.read()).hexdigest()
|
||||
|
||||
os_dict = current_releases.get(package.channel, {})
|
||||
arch_dict = os_dict.get(package.os, {})
|
||||
entry_dict = arch_dict.get(package.arch, {})
|
||||
|
||||
if package.is_delta:
|
||||
delta_dict = entry_dict.get('deltas', {})
|
||||
delta_entry = {
|
||||
'deltaUrl': url,
|
||||
'sha256Digest': digest,
|
||||
'baseVersion': package.delta_base_version,
|
||||
'baseSha256Digest': delta_info[package.channel][package.os][package.arch][package.delta_base_version]['digest'],
|
||||
}
|
||||
delta_dict[package.delta_base_version] = delta_entry
|
||||
entry_dict['deltas'] = delta_dict
|
||||
else:
|
||||
entry_dict['url'] = url
|
||||
entry_dict['sha256Digest'] = digest
|
||||
|
||||
arch_dict[package.arch] = entry_dict
|
||||
os_dict[package.os] = arch_dict
|
||||
current_releases[package.channel] = os_dict
|
||||
|
||||
if package.channel != 'stable':
|
||||
channel_current_versions[package.channel] = max(int(package.channel_version), channel_current_versions.get(package.channel, 0))
|
||||
|
||||
if artifact_version.channel == 'stable' and max([artifact_version.version, current_version], key=cmp_to_key(compare_versions)):
|
||||
releases['currentVersion'] = artifact_version.version
|
||||
else:
|
||||
releases['currentVersion'] = current_version
|
||||
|
||||
releases['previousVersions'] = s3.get_versions(full=True)
|
||||
releases['fileVersion'] = RELEASES_JSON_FILE_VERSION
|
||||
releases['allVersions'] = all_versions
|
||||
releases['channelCurrentVersions'] = channel_current_versions
|
||||
releases['currentReleases'] = current_releases
|
||||
|
||||
with open(os.path.join(LOCAL_CACHE_DIR, 'electron', RELEASES_JSON) , 'w') as file:
|
||||
json.dump(releases, file, indent=4)
|
||||
|
||||
def generate_previous_releases_page():
|
||||
pass
|
||||
|
||||
def update_website():
|
||||
pass
|
||||
|
||||
# CI Operations
|
||||
ensure_files_exist(dirs=[
|
||||
'/artifacts',
|
||||
DEPLOY_DIR,
|
||||
TEMP_DIR,
|
||||
LOCAL_CACHE_DIR,
|
||||
os.path.join(LOCAL_CACHE_DIR, 'electron')
|
||||
],
|
||||
files=[
|
||||
os.path.join('electron', RELEASES_JSON)
|
||||
])
|
||||
artifact_version = copy_artifacts_to_local_cache()
|
||||
sync_local_cache()
|
||||
|
||||
# Disabling delta update generation for now...
|
||||
# delta_info = generate_delta_updates(artifact_version)
|
||||
delta_info = {}
|
||||
|
||||
generate_releases_json(artifact_version, delta_info)
|
||||
upload_local_cache()
|
||||
# generate_previous_releases_page()
|
||||
# update_website()
|
||||
|
||||
print('Cleaning up...')
|
||||
shutil.rmtree(TEMP_DIR)
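For reference, a hedged sketch (illustrative values only, field names taken from generate_releases_json above) of the releases_v1.json document this script maintains:

releases_example = {
    'fileVersion': 1,                        # RELEASES_JSON_FILE_VERSION
    'currentVersion': '2.0.0',
    'previousVersions': ['1.0.14'],          # s3.get_versions(full=True)
    'allVersions': ['1.0.14', '2.0.0'],
    'channelCurrentVersions': {},            # e.g. {'beta': 2} when channel builds exist
    'currentReleases': {
        'stable': {
            'linux': {
                'x64': {
                    'url': 'https://<bucket-domain>/electron/2.0.0/zip/linux/x64/fcast-receiver-2.0.0-linux-x64.zip',
                    'sha256Digest': '<sha256 of that zip>',
                    # 'deltas': {'<baseVersion>': {'deltaUrl': '...', 'sha256Digest': '...',
                    #                              'baseVersion': '...', 'baseSha256Digest': '...'}},
                },
            },
        },
    },
}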
211 receivers/electron/scripts/deploy/util.py (new file)
|
@ -0,0 +1,211 @@
|
|||
import boto3
|
||||
import hashlib
|
||||
import os
|
||||
import requests
|
||||
import shutil
|
||||
from botocore.client import Config
|
||||
from collections import namedtuple
|
||||
from functools import cmp_to_key
|
||||
|
||||
CLOUDFLARE_CACHE_TOKEN = os.environ.get('CLOUDFLARE_CACHE_TOKEN')
|
||||
ZONE_ID = os.environ.get('CLOUDFLARE_ZONE_ID')
|
||||
ACCOUNT_ID = os.environ.get('R2_ACCOUNT_ID')
|
||||
ACCESS_KEY_ID = os.environ.get('R2_ACCESS_KEY_ID')
|
||||
SECRET_ACCESS_KEY = os.environ.get('R2_SECRET_ACCESS_KEY')
|
||||
BUCKET_NAME = os.environ.get('R2_BUCKET_NAME')
|
||||
|
||||
class S3Client:
|
||||
def __init__(self, cache_version_amount, excluded_delta_versions):
|
||||
# Note: Cloudflare R2 docs outdated, secret is not supposed to be hashed...
|
||||
|
||||
# Hash the secret access key using SHA-256
|
||||
#hashed_secret_key = hashlib.sha256(SECRET_ACCESS_KEY.encode()).hexdigest()
|
||||
|
||||
# Configure the S3 client for Cloudflare R2
|
||||
self.s3 = boto3.client('s3',
|
||||
endpoint_url=f'https://{ACCOUNT_ID}.r2.cloudflarestorage.com',
|
||||
aws_access_key_id=ACCESS_KEY_ID,
|
||||
# aws_secret_access_key=hashed_secret_key,
|
||||
aws_secret_access_key=SECRET_ACCESS_KEY,
|
||||
config=Config(
|
||||
signature_version='s3v4'
|
||||
)
|
||||
)
|
||||
list_response = self.s3.list_objects_v2(Bucket=BUCKET_NAME, Prefix='electron/')
|
||||
self.bucket_files = list_response.get('Contents', [])
|
||||
|
||||
bucket_files_versions = filter(lambda x: x['Key'] != 'electron/releases.json', self.bucket_files)
|
||||
self.bucket_versions_full = sorted(set(map(lambda x: x['Key'].split('/')[1], bucket_files_versions)), key=cmp_to_key(compare_versions), reverse=True)
|
||||
self.bucket_versions = self.bucket_versions_full if cache_version_amount < 0 else self.bucket_versions_full[:cache_version_amount]
|
||||
self.bucket_delta_versions = [v for v in self.bucket_versions if v not in excluded_delta_versions]
|
||||
|
||||
def get_bucket_files(self):
|
||||
return self.bucket_files
|
||||
|
||||
def get_versions(self, full=False):
|
||||
return self.bucket_versions_full if full else self.bucket_versions
|
||||
|
||||
def download_file(self, full_path, s3_path):
|
||||
print(f'Downloading file: {s3_path}')
|
||||
get_response = self.s3.get_object(Bucket=BUCKET_NAME, Key=s3_path)
|
||||
|
||||
os.makedirs(os.path.dirname(full_path), exist_ok=True)
|
||||
with open(full_path, 'wb') as file:
|
||||
file.write(get_response['Body'].read())
|
||||
|
||||
def upload_file(self, full_path, s3_path):
|
||||
print(f'Uploading file: {s3_path}')
|
||||
|
||||
domain = BUCKET_NAME.replace('-', '.')
|
||||
purge_response = requests.post(
|
||||
f'https://api.cloudflare.com/client/v4/zones/{ZONE_ID}/purge_cache',
|
||||
headers={
|
||||
'Authorization': f'Bearer {CLOUDFLARE_CACHE_TOKEN}',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
json={
|
||||
'files': [f'https://{domain}/{s3_path}']
|
||||
}
|
||||
)
|
||||
|
||||
if purge_response.status_code != 200:
|
||||
print(f'Error while purging cache: {purge_response}')
|
||||
|
||||
with open(full_path, 'rb') as file:
|
||||
put_response = self.s3.put_object(
|
||||
Body=file,
|
||||
Bucket=BUCKET_NAME,
|
||||
Key=s3_path,
|
||||
)
|
||||
|
||||
# Utility types
|
||||
class PackageFormat:
|
||||
"""Parses an artifact path to extract package information
|
||||
|
||||
Artifact format: ((VERSION)?/PKG/(OS/ARCH|ARCH)/)?fcast-receiver-VERSION-OS-ARCH(-setup|-delta-DELTA_BASE_VERSION)?(-CHANNEL-CHANNEL_VERSION)?.PKG
|
||||
"""
|
||||
|
||||
def __init__(self, path):
|
||||
self.version = None
|
||||
self.type = None
|
||||
self.os = None
|
||||
self.os_pretty = None
|
||||
self.arch = None
|
||||
self.name = None
|
||||
self.is_delta = False
|
||||
self.delta_base_version = None
|
||||
self.channel = None
|
||||
self.channel_version = None
|
||||
|
||||
dirs = path.split('/')
|
||||
file = path.split('-')
|
||||
self.name = 'fcast-receiver'
|
||||
|
||||
if len(dirs) > 1:
|
||||
parse_index = 0
|
||||
|
||||
if dirs[parse_index].count('.') > 0:
|
||||
self.version = dirs[parse_index]
|
||||
self.type = dirs[parse_index + 1]
|
||||
parse_index += 2
|
||||
else:
|
||||
self.type = dirs[parse_index]
|
||||
parse_index += 1
|
||||
|
||||
if self.type == 'zip':
|
||||
self.os = dirs[parse_index]
|
||||
self.os_pretty = 'windows' if self.os == 'win32' else 'macOS' if self.os == 'darwin' else 'linux'
|
||||
self.arch = dirs[parse_index + 1]
|
||||
parse_index += 2
|
||||
else:
|
||||
if self.type == 'wix':
|
||||
self.os = 'win32'
|
||||
self.os_pretty = 'windows'
|
||||
self.arch = dirs[parse_index]
|
||||
elif self.type == 'dmg':
|
||||
self.os = 'darwin'
|
||||
self.os_pretty = 'macOS'
|
||||
self.arch = dirs[parse_index]
|
||||
elif self.type == 'deb' or self.type == 'rpm':
|
||||
self.os = 'linux'
|
||||
self.os_pretty = 'linux'
|
||||
self.arch = dirs[parse_index]
|
||||
parse_index += 1
|
||||
|
||||
# Unsupported package format (e.g. 1.0.14)
|
||||
if self.version == '1.0.14':
|
||||
return
|
||||
|
||||
file = dirs[parse_index].split('-')
|
||||
|
||||
self.version = file[2]
|
||||
channel_index = 5
|
||||
if len(file) == channel_index:
|
||||
self.channel = 'stable'
|
||||
return
|
||||
|
||||
if file[channel_index] == 'delta':
|
||||
self.is_delta = True
|
||||
self.delta_base_version = file[channel_index + 1].replace('.delta', '')
|
||||
channel_index += 2
|
||||
elif file[channel_index] == 'setup':
|
||||
channel_index += 1
|
||||
|
||||
if len(file) > channel_index:
|
||||
self.channel = file[channel_index]
|
||||
version = file[channel_index + 1]
|
||||
self.channel_version = version[:version.rfind('.')]
|
||||
else:
|
||||
self.channel = 'stable'
|
||||
|
||||
def packageNamePretty(self):
|
||||
if self.channel != 'stable':
|
||||
return f'{self.name}-{self.version}-{self.os_pretty}-{self.arch}-{self.channel}-{self.channel_version}'
|
||||
else:
|
||||
return f'{self.name}-{self.version}-{self.os_pretty}-{self.arch}'
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f'''PackageFormat(type={self.type}, version={self.version}, os={self.os}, arch={self.arch},
|
||||
is_delta={self.is_delta}, delta_base_version={self.delta_base_version}, channel={self.channel},
|
||||
channel_version={self.channel_version})'''
|
||||
|
||||
ArtifactVersion = namedtuple('ArtifactVersion', ['version', 'channel', 'channel_version'])
|
||||
|
||||
# Utility functions
|
||||
def compare_versions(x, y):
|
||||
x_parts = x.split('.')
|
||||
y_parts = y.split('.')
|
||||
|
||||
for i in range(len(x_parts)):
|
||||
if x_parts[i] < y_parts[i]:
|
||||
return -1
|
||||
elif x_parts[i] > y_parts[i]:
|
||||
return 1
|
||||
|
||||
return 0
|
||||
|
||||
def generate_update_tarball(full_path, rel_path, working_dir, package):
|
||||
if package.os == 'darwin':
|
||||
temp_working_dir = os.path.join(working_dir, os.path.dirname(rel_path), f'{package.name}-{package.os}-{package.arch}')
|
||||
extract_dir = temp_working_dir
|
||||
else:
|
||||
temp_working_dir = os.path.join(working_dir, os.path.dirname(rel_path))
|
||||
extract_dir = os.path.join(temp_working_dir, f'{package.name}-{package.os}-{package.arch}')
|
||||
|
||||
shutil.unpack_archive(full_path, temp_working_dir)
|
||||
|
||||
if package.os == 'darwin':
|
||||
shutil.make_archive(os.path.join(working_dir, os.path.dirname(rel_path), package.packageNamePretty()), 'tar', extract_dir)
|
||||
shutil.rmtree(temp_working_dir)
|
||||
|
||||
temp_working_dir = os.path.join(working_dir, os.path.dirname(rel_path))
|
||||
else:
|
||||
shutil.make_archive(os.path.join(temp_working_dir, package.packageNamePretty()), 'tar', extract_dir)
|
||||
shutil.rmtree(extract_dir)
|
||||
|
||||
digest = ''
|
||||
artifact_name = f'{package.packageNamePretty()}.tar'
|
||||
with open(os.path.join(temp_working_dir, artifact_name), 'rb') as file:
|
||||
digest = hashlib.sha256(file.read()).hexdigest()
|
||||
|
||||
return artifact_name, digest
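As a quick check of the parsing rules in PackageFormat above, paths produced by the forge.config naming scheme would decompose roughly as follows (a sketch, not part of the diff):

pkg = PackageFormat('2.0.0/zip/linux/x64/fcast-receiver-2.0.0-linux-x64.zip')
# directory parts -> version='2.0.0', type='zip', os='linux', arch='x64'
# file parts      -> ['fcast', 'receiver', '2.0.0', 'linux', 'x64.zip']: five fields, so channel='stable'

pkg2 = PackageFormat('2.0.0/wix/x64/fcast-receiver-2.0.0-windows-x64-setup-beta-2.msi')
# type='wix' implies os='win32'; the trailing fields give channel='beta', channel_version='2'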
@@ -1,4 +1,4 @@
import { app } from 'electron';
import Main from './Main';

Main.main(app);
await Main.main(app);
@@ -1,6 +1,8 @@
import mdns from 'mdns-js';
import * as log4js from "log4js";
const cp = require('child_process');
const os = require('os');
const logger = log4js.getLogger();

export class DiscoveryService {
// eslint-disable-next-line @typescript-eslint/no-explicit-any

@@ -22,15 +24,15 @@ export class DiscoveryService {
hostname = os.hostname();
}
catch (err) {
console.warn('Error fetching hostname, trying different method...');
console.warn(err);
logger.warn('Error fetching hostname, trying different method...');
logger.warn(err);

try {
hostname = cp.execSync("hostnamectl hostname").toString().trim();
}
catch (err2) {
console.warn('Error fetching hostname again, using generic name...');
console.warn(err2);
logger.warn('Error fetching hostname again, using generic name...');
logger.warn(err2);

hostname = 'linux device';
}

@@ -50,7 +52,7 @@ export class DiscoveryService {
}

const name = `FCast-${DiscoveryService.getComputerName()}`;
console.log("Discovery service started.", name);
logger.info("Discovery service started.", name);

this.serviceTcp = mdns.createAdvertisement(mdns.tcp('_fcast'), 46899, { name: name });
this.serviceTcp.start();
|
@ -1,7 +1,9 @@
|
|||
import net = require('net');
|
||||
import * as net from 'net';
|
||||
import * as log4js from "log4js";
|
||||
import { EventEmitter } from 'node:events';
|
||||
import { PlaybackErrorMessage, PlaybackUpdateMessage, PlayMessage, SeekMessage, SetSpeedMessage, SetVolumeMessage, VersionMessage, VolumeUpdateMessage } from './Packets';
|
||||
import { WebSocket } from 'ws';
|
||||
const logger = log4js.getLogger();
|
||||
|
||||
enum SessionState {
|
||||
Idle = 0,
|
||||
|
@ -47,7 +49,7 @@ export class FCastSession {
|
|||
|
||||
send(opcode: number, message = null) {
|
||||
const json = message ? JSON.stringify(message) : null;
|
||||
console.log(`send (opcode: ${opcode}, body: ${json})`);
|
||||
logger.info(`send (opcode: ${opcode}, body: ${json})`);
|
||||
|
||||
let data: Uint8Array;
|
||||
if (json) {
|
||||
|
@ -87,7 +89,7 @@ export class FCastSession {
|
|||
return;
|
||||
}
|
||||
|
||||
console.log(`${receivedBytes.length} bytes received`);
|
||||
logger.info(`${receivedBytes.length} bytes received`);
|
||||
|
||||
switch (this.state) {
|
||||
case SessionState.WaitingForLength:
|
||||
|
@ -97,7 +99,7 @@ export class FCastSession {
|
|||
this.handlePacketBytes(receivedBytes);
|
||||
break;
|
||||
default:
|
||||
console.log(`Data received is unhandled in current session state ${this.state}.`);
|
||||
logger.info(`Data received is unhandled in current session state ${this.state}.`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -108,20 +110,20 @@ export class FCastSession {
|
|||
receivedBytes.copy(this.buffer, this.bytesRead, 0, bytesToRead);
|
||||
this.bytesRead += bytesToRead;
|
||||
|
||||
console.log(`handleLengthBytes: Read ${bytesToRead} bytes from packet`);
|
||||
logger.info(`handleLengthBytes: Read ${bytesToRead} bytes from packet`);
|
||||
|
||||
if (this.bytesRead >= LENGTH_BYTES) {
|
||||
this.state = SessionState.WaitingForData;
|
||||
this.packetLength = this.buffer.readUInt32LE(0);
|
||||
this.bytesRead = 0;
|
||||
console.log(`Packet length header received from: ${this.packetLength}`);
|
||||
logger.info(`Packet length header received from: ${this.packetLength}`);
|
||||
|
||||
if (this.packetLength > MAXIMUM_PACKET_LENGTH) {
|
||||
throw new Error(`Maximum packet length is 32kB: ${this.packetLength}`);
|
||||
}
|
||||
|
||||
if (bytesRemaining > 0) {
|
||||
console.log(`${bytesRemaining} remaining bytes pushed to handlePacketBytes`);
|
||||
logger.info(`${bytesRemaining} remaining bytes pushed to handlePacketBytes`);
|
||||
this.handlePacketBytes(receivedBytes.slice(bytesToRead));
|
||||
}
|
||||
}
|
||||
|
@ -133,10 +135,10 @@ export class FCastSession {
|
|||
receivedBytes.copy(this.buffer, this.bytesRead, 0, bytesToRead);
|
||||
this.bytesRead += bytesToRead;
|
||||
|
||||
console.log(`handlePacketBytes: Read ${bytesToRead} bytes from packet`);
|
||||
logger.info(`handlePacketBytes: Read ${bytesToRead} bytes from packet`);
|
||||
|
||||
if (this.bytesRead >= this.packetLength) {
|
||||
console.log(`Packet finished receiving from of ${this.packetLength} bytes.`);
|
||||
logger.info(`Packet finished receiving from of ${this.packetLength} bytes.`);
|
||||
this.handleNextPacket();
|
||||
|
||||
this.state = SessionState.WaitingForLength;
|
||||
|
@ -144,14 +146,14 @@ export class FCastSession {
|
|||
this.bytesRead = 0;
|
||||
|
||||
if (bytesRemaining > 0) {
|
||||
console.log(`${bytesRemaining} remaining bytes pushed to handleLengthBytes`);
|
||||
logger.info(`${bytesRemaining} remaining bytes pushed to handleLengthBytes`);
|
||||
this.handleLengthBytes(receivedBytes.slice(bytesToRead));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private handlePacket(opcode: number, body: string | undefined) {
|
||||
console.log(`handlePacket (opcode: ${opcode}, body: ${body})`);
|
||||
logger.info(`handlePacket (opcode: ${opcode}, body: ${body})`);
|
||||
|
||||
try {
|
||||
switch (opcode) {
|
||||
|
@ -181,16 +183,16 @@ export class FCastSession {
|
|||
break;
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn(`Error handling packet from.`, e);
|
||||
logger.warn(`Error handling packet from.`, e);
|
||||
}
|
||||
}
|
||||
|
||||
private handleNextPacket() {
|
||||
console.log(`Processing packet of ${this.bytesRead} bytes from`);
|
||||
logger.info(`Processing packet of ${this.bytesRead} bytes from`);
|
||||
|
||||
const opcode = this.buffer[0];
|
||||
const body = this.packetLength > 1 ? this.buffer.toString('utf8', 1, this.packetLength) : null;
|
||||
console.log('body', body);
|
||||
logger.info('body', body);
|
||||
this.handlePacket(opcode, body);
|
||||
}
|
||||
|
||||
|
|
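The logging changes above leave the session's wire framing untouched; inferred from handleLengthBytes/handleNextPacket (a 4-byte little-endian length header, then an opcode byte followed by an optional UTF-8 JSON body, capped at 32 kB), an encoder sketch looks like:

import json
import struct

def encode_packet(opcode: int, message=None) -> bytes:
    body = json.dumps(message).encode('utf-8') if message is not None else b''
    packet = bytes([opcode]) + body          # length counts the opcode byte plus the JSON body
    assert len(packet) <= 32 * 1024          # the session throws on packets above 32 kB
    return struct.pack('<I', len(packet)) + packet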
|
@ -9,6 +9,7 @@ import * as os from 'os';
|
|||
import * as path from 'path';
|
||||
import * as http from 'http';
|
||||
import * as url from 'url';
|
||||
import * as log4js from "log4js";
|
||||
import { AddressInfo } from 'ws';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import yargs from 'yargs';
|
||||
|
@ -29,6 +30,7 @@ export default class Main {
|
|||
static proxyServer: http.Server;
|
||||
static proxyServerAddress: AddressInfo;
|
||||
static proxiedFiles: Map<string, { url: string, headers: { [key: string]: string } }> = new Map();
|
||||
static logger: log4js.Logger;
|
||||
|
||||
private static toggleMainWindow() {
|
||||
if (Main.mainWindow) {
|
||||
|
@ -51,25 +53,18 @@ export default class Main {
|
|||
{
|
||||
label: 'Check for updates',
|
||||
click: async () => {
|
||||
if (Updater.updateDownloaded) {
|
||||
Main.mainWindow.webContents.send("download-complete");
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const updater = new Updater(path.join(__dirname, '../'), 'https://releases.grayjay.app/fcastreceiver');
|
||||
if (await updater.update()) {
|
||||
const restartPrompt = await dialog.showMessageBox({
|
||||
type: 'info',
|
||||
title: 'Update completed',
|
||||
message: 'The application has been updated. Restart now to apply the changes.',
|
||||
buttons: ['Restart'],
|
||||
defaultId: 0
|
||||
});
|
||||
const updateAvailable = await Updater.checkForUpdates();
|
||||
|
||||
console.log('Update completed');
|
||||
|
||||
// Restart the app if the user clicks the 'Restart' button
|
||||
if (restartPrompt.response === 0) {
|
||||
Main.application.relaunch();
|
||||
Main.application.exit(0);
|
||||
}
|
||||
} else {
|
||||
if (updateAvailable) {
|
||||
Main.mainWindow.webContents.send("update-available");
|
||||
}
|
||||
else {
|
||||
await dialog.showMessageBox({
|
||||
type: 'info',
|
||||
title: 'Already up-to-date',
|
||||
|
@ -81,13 +76,13 @@ export default class Main {
|
|||
} catch (err) {
|
||||
await dialog.showMessageBox({
|
||||
type: 'error',
|
||||
title: 'Failed to update',
|
||||
message: 'The application failed to update.',
|
||||
title: 'Failed to check for updates',
|
||||
message: err,
|
||||
buttons: ['OK'],
|
||||
defaultId: 0
|
||||
});
|
||||
|
||||
console.error('Failed to update:', err);
|
||||
Main.logger.error('Failed to check for updates:', err);
|
||||
}
|
||||
},
|
||||
},
|
||||
|
@ -98,7 +93,7 @@ export default class Main {
|
|||
label: 'Restart',
|
||||
click: () => {
|
||||
this.application.relaunch();
|
||||
this.application.exit(0);
|
||||
this.application.exit();
|
||||
}
|
||||
},
|
||||
{
|
||||
|
@ -182,8 +177,34 @@ export default class Main {
|
|||
ipcMain.on('send-volume-update', (event: IpcMainEvent, value: VolumeUpdateMessage) => {
|
||||
l.send(Opcode.VolumeUpdate, value);
|
||||
});
|
||||
|
||||
ipcMain.on('send-download-request', async () => {
|
||||
if (!Updater.isDownloading) {
|
||||
try {
|
||||
await Updater.downloadUpdate();
|
||||
Main.mainWindow.webContents.send("download-complete");
|
||||
} catch (err) {
|
||||
await dialog.showMessageBox({
|
||||
type: 'error',
|
||||
title: 'Failed to download update',
|
||||
message: err,
|
||||
buttons: ['OK'],
|
||||
defaultId: 0
|
||||
});
|
||||
|
||||
Main.logger.error('Failed to download update:', err);
|
||||
Main.mainWindow.webContents.send("download-failed");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
ipcMain.on('send-restart-request', async () => {
|
||||
Updater.restart();
|
||||
});
|
||||
});
|
||||
|
||||
ipcMain.handle('updater-progress', async () => { return Updater.updateProgress; });
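Taken together, the handlers above and the Updater fields they touch define a small update handshake between the main and renderer processes. A hedged summary of the channels as an illustrative type; the channel names come from the handlers above, the aggregate map itself is not part of this commit.

// Direction and payload of each update-related IPC channel used above.
type UpdateIpcChannels = {
    'send-download-request': void;  // renderer -> main: start Updater.downloadUpdate()
    'send-restart-request': void;   // renderer -> main: Updater.restart()
    'update-available': void;       // main -> renderer: show the update prompt
    'download-complete': void;      // main -> renderer: offer a restart
    'download-failed': void;        // main -> renderer: hide the update UI
    'updater-progress': number;     // renderer invokes main: download progress in the 0..1 range
};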
|
||||
|
||||
ipcMain.handle('is-full-screen', async () => {
|
||||
const window = Main.playerWindow;
|
||||
if (!window) {
|
||||
|
@ -214,17 +235,27 @@ export default class Main {
|
|||
if (Main.shouldOpenMainWindow) {
|
||||
Main.openMainWindow();
|
||||
}
|
||||
|
||||
if (Updater.updateError) {
|
||||
dialog.showMessageBox({
|
||||
type: 'error',
|
||||
title: 'Error applying update',
|
||||
message: 'Please try again later or visit https://fcast.org to update.',
|
||||
buttons: ['OK'],
|
||||
defaultId: 0
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private static setupProxyServer(): Promise<void> {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
try {
|
||||
console.log(`Proxy server starting`);
|
||||
Main.logger.info(`Proxy server starting`);
|
||||
|
||||
const port = 0;
|
||||
Main.proxyServer = http.createServer((req, res) => {
|
||||
console.log(`Request received`);
|
||||
Main.logger.info(`Request received`);
|
||||
const requestUrl = `http://${req.headers.host}${req.url}`;
|
||||
|
||||
const proxyInfo = Main.proxiedFiles.get(requestUrl);
|
||||
|
@ -265,7 +296,7 @@ export default class Main {
|
|||
|
||||
req.pipe(proxyReq, { end: true });
|
||||
proxyReq.on('error', (e) => {
|
||||
console.error(`Problem with request: ${e.message}`);
|
||||
Main.logger.error(`Problem with request: ${e.message}`);
|
||||
res.writeHead(500);
|
||||
res.end();
|
||||
});
|
||||
|
@ -275,7 +306,7 @@ export default class Main {
|
|||
});
|
||||
Main.proxyServer.listen(port, '127.0.0.1', () => {
|
||||
Main.proxyServerAddress = Main.proxyServer.address() as AddressInfo;
|
||||
console.log(`Proxy server running at http://127.0.0.1:${Main.proxyServerAddress.port}/`);
|
||||
Main.logger.info(`Proxy server running at http://127.0.0.1:${Main.proxyServerAddress.port}/`);
|
||||
resolve();
|
||||
});
|
||||
} catch (e) {
|
||||
|
@ -303,7 +334,7 @@ export default class Main {
|
|||
}
|
||||
|
||||
const proxiedUrl = `http://127.0.0.1:${Main.proxyServerAddress.port}/${uuidv4()}`;
|
||||
console.log("Proxied url", { proxiedUrl, url, headers });
|
||||
Main.logger.info("Proxied url", { proxiedUrl, url, headers });
|
||||
Main.proxiedFiles.set(proxiedUrl, { url: url, headers: headers });
|
||||
return proxiedUrl;
|
||||
}
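For clarity, a hedged sketch of what the proxy request handler does with an entry from Main.proxiedFiles once a proxied URL issued above is requested: look up the original URL, forward the request with the stored headers, and pipe the response back. This is a simplified, hypothetical standalone function; the real handler in this diff also covers the error paths shown further up.

import * as http from 'http';
import * as https from 'https';

function forwardProxiedRequest(req: http.IncomingMessage, res: http.ServerResponse,
                               entry: { url: string, headers: { [key: string]: string } }) {
    const target = new URL(entry.url);
    const client = target.protocol === 'https:' ? https : http;
    const proxyReq = client.request(target, { headers: entry.headers }, (upstream) => {
        res.writeHead(upstream.statusCode ?? 502, upstream.headers);
        upstream.pipe(res, { end: true });
    });
    proxyReq.on('error', () => { res.writeHead(500); res.end(); });
    req.pipe(proxyReq, { end: true });
}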
|
||||
|
@ -356,21 +387,46 @@ export default class Main {
|
|||
});
|
||||
}
|
||||
|
||||
static main(app: Electron.App) {
|
||||
Main.application = app;
|
||||
const argv = yargs(hideBin(process.argv))
|
||||
.parserConfiguration({
|
||||
'boolean-negation': false
|
||||
})
|
||||
.options({
|
||||
'no-main-window': { type: 'boolean', default: false, desc: "Start minimized to tray" },
|
||||
'fullscreen': { type: 'boolean', default: false, desc: "Start application in fullscreen" }
|
||||
})
|
||||
.parseSync();
|
||||
static async main(app: Electron.App) {
|
||||
try {
|
||||
Main.application = app;
|
||||
const isUpdating = Updater.isUpdating();
|
||||
const fileLogType = isUpdating ? 'fileSync' : 'file';
|
||||
|
||||
Main.startFullscreen = argv.fullscreen;
|
||||
Main.shouldOpenMainWindow = !argv.noMainWindow;
|
||||
Main.application.on('ready', Main.onReady);
|
||||
Main.application.on('window-all-closed', () => { });
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
out: { type: 'stdout' },
|
||||
log: { type: fileLogType, filename: path.join(app.getPath('logs'), 'fcast-receiver.log'), flags: 'a', maxLogSize: '5M' },
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['out', 'log'], level: 'info' },
|
||||
},
|
||||
});
|
||||
Main.logger = log4js.getLogger();
|
||||
Main.logger.info(`Starting application: ${app.name} (${app.getVersion()} - ${Updater.getChannelVersion()}) | ${app.getAppPath()}`);
|
||||
|
||||
if (isUpdating) {
|
||||
await Updater.processUpdate();
|
||||
}
|
||||
|
||||
const argv = yargs(hideBin(process.argv))
|
||||
.parserConfiguration({
|
||||
'boolean-negation': false
|
||||
})
|
||||
.options({
|
||||
'no-main-window': { type: 'boolean', default: false, desc: "Start minimized to tray" },
|
||||
'fullscreen': { type: 'boolean', default: false, desc: "Start application in fullscreen" }
|
||||
})
|
||||
.parseSync();
|
||||
|
||||
Main.startFullscreen = argv.fullscreen;
|
||||
Main.shouldOpenMainWindow = !argv.noMainWindow;
|
||||
Main.application.on('ready', Main.onReady);
|
||||
Main.application.on('window-all-closed', () => { });
|
||||
}
|
||||
catch (err) {
|
||||
Main.logger.error(`Error starting application: ${err}`);
|
||||
app.exit();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
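Since main() is now async, whatever entry file bootstraps the app has to let it run before anything else; a hedged sketch follows (the entry file layout and import path are assumptions, not part of this diff).

import { app } from 'electron';
import Main from './Main'; // assumed module layout

// A pending update is fully processed (and the app possibly relaunched)
// before the 'ready' handler is ever registered.
Main.main(app);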
|
||||
|
|
35
receivers/electron/src/Store.ts
Normal file
|
@ -0,0 +1,35 @@
|
|||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
import storage from 'electron-json-storage';
|
||||
import { app } from 'electron';
|
||||
import * as log4js from "log4js";
|
||||
const logger = log4js.getLogger();
|
||||
|
||||
export class Store {
|
||||
private static storeVersion = 1;
|
||||
private static userSettings = 'UserSettings';
|
||||
private static settingsCache: any = null;
|
||||
|
||||
static {
|
||||
storage.setDataPath(app.getPath('userData'));
|
||||
Store.settingsCache = storage.getSync(Store.userSettings);
|
||||
|
||||
if (Store.get('storeVersion') === null) {
|
||||
Store.set('storeVersion', Store.storeVersion);
|
||||
}
|
||||
}
|
||||
|
||||
public static get(key: string): any {
|
||||
return Store.settingsCache[key] ?? null;
|
||||
}
|
||||
|
||||
public static set(key: string, value: any) {
|
||||
Store.settingsCache[key] = value;
|
||||
|
||||
logger.info(`Writing settings file: key '${key}', value ${JSON.stringify(value)}`);
|
||||
storage.set(Store.userSettings, Store.settingsCache, (err) => {
|
||||
if (err) {
|
||||
logger.error(`Error writing user settings: ${err}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
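A hedged usage sketch for the new Store wrapper; the 'updater' key mirrors the one read later in Updater.checkForUpdates(), and the default object shape is an assumption.

import { Store } from './Store';

let updaterSettings = Store.get('updater'); // null on first run
if (updaterSettings === null) {
    updaterSettings = { channel: 'stable' };
    Store.set('updater', updaterSettings); // persisted via electron-json-storage
}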
|
|
@ -1,4 +1,4 @@
|
|||
import net = require('net');
|
||||
import * as net from 'net';
|
||||
import { FCastSession, Opcode } from './FCastSession';
|
||||
import { EventEmitter } from 'node:events';
|
||||
import { dialog } from 'electron';
|
||||
|
@ -39,14 +39,14 @@ export class TcpListenerService {
|
|||
try {
|
||||
session.send(opcode, message);
|
||||
} catch (e) {
|
||||
console.warn("Failed to send error.", e);
|
||||
Main.logger.warn("Failed to send error.", e);
|
||||
session.close();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private async handleServerError(err: NodeJS.ErrnoException) {
|
||||
console.error("Server error:", err);
|
||||
Main.logger.error("Server error:", err);
|
||||
|
||||
const restartPrompt = await dialog.showMessageBox({
|
||||
type: 'error',
|
||||
|
@ -66,14 +66,14 @@ export class TcpListenerService {
|
|||
}
|
||||
|
||||
private handleConnection(socket: net.Socket) {
|
||||
console.log(`new connection from ${socket.remoteAddress}:${socket.remotePort}`);
|
||||
Main.logger.info(`new connection from ${socket.remoteAddress}:${socket.remotePort}`);
|
||||
|
||||
const session = new FCastSession(socket, (data) => socket.write(data));
|
||||
session.bindEvents(this.emitter);
|
||||
this.sessions.push(session);
|
||||
|
||||
socket.on("error", (err) => {
|
||||
console.warn(`Error from ${socket.remoteAddress}:${socket.remotePort}.`, err);
|
||||
Main.logger.warn(`Error from ${socket.remoteAddress}:${socket.remotePort}.`, err);
|
||||
socket.destroy();
|
||||
});
|
||||
|
||||
|
@ -81,7 +81,7 @@ export class TcpListenerService {
|
|||
try {
|
||||
session.processBytes(buffer);
|
||||
} catch (e) {
|
||||
console.warn(`Error while handling packet from ${socket.remoteAddress}:${socket.remotePort}.`, e);
|
||||
Main.logger.warn(`Error while handling packet from ${socket.remoteAddress}:${socket.remotePort}.`, e);
|
||||
socket.end();
|
||||
}
|
||||
});
|
||||
|
@ -94,10 +94,10 @@ export class TcpListenerService {
|
|||
});
|
||||
|
||||
try {
|
||||
console.log('Sending version');
|
||||
Main.logger.info('Sending version');
|
||||
session.send(Opcode.Version, {version: 2});
|
||||
} catch (e) {
|
||||
console.log('Failed to send version');
|
||||
Main.logger.info('Failed to send version', e);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,41 +1,69 @@
|
|||
import * as fs from 'fs';
|
||||
import * as https from 'https';
|
||||
import * as path from 'path';
|
||||
import { URL } from 'url';
|
||||
import * as crypto from 'crypto';
|
||||
import * as log4js from "log4js";
|
||||
import { app } from 'electron';
|
||||
import { Store } from './Store';
|
||||
import sudo from 'sudo-prompt';
|
||||
const cp = require('child_process');
|
||||
const extract = require('extract-zip');
|
||||
const logger = log4js.getLogger();
|
||||
|
||||
enum UpdateState {
|
||||
Copy = 'copy',
|
||||
Cleanup = 'cleanup',
|
||||
Error = 'error',
|
||||
};
|
||||
|
||||
interface ReleaseInfo {
|
||||
previousVersions: [string];
|
||||
currentVersion: string;
|
||||
currentReleases: [
|
||||
string: [ // channel
|
||||
string: [ // os
|
||||
string: [ // arch
|
||||
string: []
|
||||
]
|
||||
]
|
||||
]
|
||||
];
|
||||
channelCurrentVersions: [string: number];
|
||||
allVersions: [string];
|
||||
fileVersion: string;
|
||||
}
|
||||
|
||||
interface UpdateInfo {
|
||||
updateState: UpdateState;
|
||||
installPath: string;
|
||||
tempPath: string;
|
||||
currentVersion: string;
|
||||
downloadFile: string;
|
||||
error?: string
|
||||
}
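To make the ReleaseInfo shape concrete, a hedged example of what a releases_v1.json document consumed by checkForUpdates()/downloadUpdate() below could look like; every value here is made up, only the field names come from this diff.

const exampleReleases = {
    currentVersion: '2.0.0',
    channelCurrentVersions: { beta: 3 },
    currentReleases: {
        stable: {
            win32: { x64: { url: 'https://dl.fcast.org/electron/<file>.zip', sha256Digest: '<hex digest>' } },
            linux: { x64: { url: 'https://dl.fcast.org/electron/<file>.zip', sha256Digest: '<hex digest>' } },
        },
    },
    previousVersions: ['1.0.14'],
    allVersions: ['1.0.14', '2.0.0'],
    fileVersion: '1',
};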
|
||||
|
||||
export class Updater {
|
||||
private basePath: string;
|
||||
private baseUrl: string;
|
||||
private appFiles: string[];
|
||||
private static readonly supportedReleasesJsonVersion = '1';
|
||||
|
||||
constructor(basePath: string, baseUrl: string) {
|
||||
this.basePath = basePath;
|
||||
this.baseUrl = baseUrl;
|
||||
this.appFiles = [
|
||||
'dist/main/c.mp4',
|
||||
'dist/main/index.html',
|
||||
'dist/main/preload.js',
|
||||
'dist/main/qrcode.min.js',
|
||||
'dist/main/renderer.js',
|
||||
'dist/main/style.css',
|
||||
'dist/main/video-js.min.css',
|
||||
'dist/main/video.min.js',
|
||||
private static appPath: string = app.getAppPath();
|
||||
private static installPath: string = process.platform === 'darwin' ? path.join(Updater.appPath, '../../../') : path.join(Updater.appPath, '../../');
|
||||
private static updateDataPath: string = path.join(app.getPath('userData'), 'updater');
|
||||
private static updateMetadataPath = path.join(Updater.updateDataPath, './update.json');
|
||||
private static baseUrl: string = 'https://dl.fcast.org/electron';
|
||||
private static isRestarting: boolean = false;
|
||||
|
||||
'dist/player/index.html',
|
||||
'dist/player/preload.js',
|
||||
'dist/player/renderer.js',
|
||||
'dist/player/style.css',
|
||||
'dist/player/video-js.min.css',
|
||||
'dist/player/video.min.js',
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
private static localPackageJson: any = null;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
private static releasesJson: any = null;
|
||||
|
||||
'dist/app.ico',
|
||||
'dist/app.png',
|
||||
'dist/bundle.js',
|
||||
'package.json'
|
||||
];
|
||||
}
|
||||
public static isDownloading: boolean = false;
|
||||
public static updateError: boolean = false;
|
||||
public static updateDownloaded: boolean = false;
|
||||
public static updateProgress: number = 0;
|
||||
|
||||
private async fetchJSON(url: string): Promise<any> {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
private static async fetchJSON(url: string): Promise<any> {
|
||||
return new Promise((resolve, reject) => {
|
||||
https.get(url, (res) => {
|
||||
let data = '';
|
||||
|
@ -56,11 +84,19 @@ export class Updater {
|
|||
});
|
||||
}
|
||||
|
||||
private async downloadFile(url: string, destination: string): Promise<void> {
|
||||
private static async downloadFile(url: string, destination: string): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const file = fs.createWriteStream(destination);
|
||||
https.get(url, (response) => {
|
||||
const downloadSize = Number(response.headers['content-length']);
|
||||
logger.info(`Update size: ${downloadSize} bytes`);
|
||||
response.pipe(file);
|
||||
let downloadedBytes = 0;
|
||||
|
||||
response.on('data', (chunk) => {
|
||||
downloadedBytes += chunk.length;
|
||||
Updater.updateProgress = downloadedBytes / downloadSize;
|
||||
});
|
||||
file.on('finish', () => {
|
||||
file.close();
|
||||
resolve();
|
||||
|
@ -72,40 +108,276 @@ export class Updater {
|
|||
});
|
||||
}
|
||||
|
||||
private compareVersions(v1: string, v2: string): number {
|
||||
const v1Parts = v1.split('.').map(Number);
|
||||
const v2Parts = v2.split('.').map(Number);
|
||||
private static async applyUpdate(src: string, dst: string) {
|
||||
try {
|
||||
fs.accessSync(dst, fs.constants.F_OK | fs.constants.R_OK | fs.constants.W_OK | fs.constants.X_OK);
|
||||
|
||||
for (let i = 0; i < v1Parts.length; i++) {
|
||||
if (v1Parts[i] > v2Parts[i]) {
|
||||
return 1;
|
||||
} else if (v1Parts[i] < v2Parts[i]) {
|
||||
return -1;
|
||||
// Electron runtime sees .asar file as directory and causes errors during copy/remove operations
|
||||
process.noAsar = true
|
||||
fs.rmSync(dst, { recursive: true, force: true });
|
||||
if (process.platform === 'darwin') {
|
||||
// Electron framework libraries break otherwise on Mac
|
||||
fs.cpSync(src, dst, { recursive: true, force: true, verbatimSymlinks: true });
|
||||
}
|
||||
else {
|
||||
fs.cpSync(src, dst, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
if (err.code === 'EACCES' || err.code === 'EPERM') {
|
||||
logger.info('Update requires admin privileges. Escalating...');
|
||||
|
||||
return 0;
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
let command: string;
|
||||
if (process.platform === 'win32') {
|
||||
// Using native cmd.exe seems to create less issues than using powershell...
|
||||
command = `rmdir /S /Q "${dst}" & xcopy /Y /E "${src}" "${dst}"`;
|
||||
}
|
||||
else {
|
||||
command = `rm -rf '${dst}'; cp -rf '${src}' '${dst}'`;
|
||||
}
|
||||
|
||||
sudo.exec(command, { name: 'FCast Receiver' }, (error, stdout, stderr) => {
|
||||
if (error) {
|
||||
logger.error(error);
|
||||
logger.warn(`stdout: ${stdout}`);
|
||||
logger.warn(`stderr: ${stderr}`);
|
||||
reject('User did not authorize the operation...');
|
||||
}
|
||||
|
||||
logger.info('stdout', stdout);
|
||||
logger.info('stderr', stderr);
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
}
|
||||
else {
|
||||
logger.error(err);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
finally {
|
||||
process.noAsar = false;
|
||||
}
|
||||
}
|
||||
|
||||
public async update(): Promise<Boolean> {
|
||||
console.log("Updater invoked", { baseUrl: this.baseUrl, basePath: this.basePath });
|
||||
// Cannot use app.relaunch(...) since it breaks privilege escalation on Linux...
|
||||
// Also does not work very well on Mac...
|
||||
private static relaunch(binPath: string) {
|
||||
logger.info(`Relaunching app binary: ${binPath}`);
|
||||
log4js.shutdown();
|
||||
|
||||
const localPackage = JSON.parse(fs.readFileSync(path.join(this.basePath, './package.json'), 'utf-8'));
|
||||
const remotePackage = await this.fetchJSON(`${this.baseUrl}/package.json`.toString());
|
||||
let proc;
|
||||
if (process.platform === 'win32') {
|
||||
// cwd is bugged on Windows, perhaps due to needing to be in system32 to launch cmd.exe
|
||||
proc = cp.spawn(`"${binPath}"`, [], { stdio: 'ignore', shell: true, detached: true, windowsHide: true });
|
||||
}
|
||||
else if (process.platform === 'darwin') {
|
||||
proc = cp.spawn(`open '${binPath}'`, [], { cwd: path.dirname(binPath), shell: true, stdio: 'ignore', detached: true });
|
||||
}
|
||||
else {
|
||||
proc = cp.spawn(binPath, [], { cwd: path.dirname(binPath), shell: true, stdio: 'ignore', detached: true });
|
||||
}
|
||||
|
||||
console.log('Update check', { localVersion: localPackage.version, remoteVersion: remotePackage.version });
|
||||
if (this.compareVersions(remotePackage.version, localPackage.version) === 1) {
|
||||
for (const file of this.appFiles) {
|
||||
const fileUrl = `${this.baseUrl}/${file}`;
|
||||
const destination = path.join(this.basePath, file);
|
||||
proc.unref();
|
||||
app.exit();
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Downloading '${fileUrl}' to '${destination}'.`);
|
||||
await this.downloadFile(fileUrl.toString(), destination);
|
||||
public static restart() {
|
||||
if (!Updater.isRestarting) {
|
||||
Updater.isRestarting = true;
|
||||
const updateInfo: UpdateInfo = JSON.parse(fs.readFileSync(Updater.updateMetadataPath, 'utf8'));
|
||||
const extractionDir = process.platform === 'darwin' ? 'FCast Receiver.app' : `fcast-receiver-${process.platform}-${process.arch}`;
|
||||
const binaryName = process.platform === 'win32' ? 'fcast-receiver.exe' : 'fcast-receiver';
|
||||
const updateBinPath = process.platform === 'darwin' ? path.join(updateInfo.tempPath, extractionDir) : path.join(updateInfo.tempPath, extractionDir, binaryName);
|
||||
|
||||
Updater.relaunch(updateBinPath);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
public static isUpdating(): boolean {
|
||||
try {
|
||||
const updateInfo: UpdateInfo = JSON.parse(fs.readFileSync(Updater.updateMetadataPath, 'utf8'));
|
||||
Updater.updateError = true;
|
||||
return updateInfo.updateState !== 'error';
|
||||
}
|
||||
catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static getChannelVersion(): string {
|
||||
if (Updater.localPackageJson === null) {
|
||||
Updater.localPackageJson = JSON.parse(fs.readFileSync(path.join(Updater.appPath, './package.json'), 'utf8'));
|
||||
Updater.localPackageJson.channelVersion = Updater.localPackageJson.channelVersion ? Updater.localPackageJson.channelVersion : 0
|
||||
}
|
||||
|
||||
return Updater.localPackageJson.channelVersion;
|
||||
}
|
||||
|
||||
public static async processUpdate(): Promise<void> {
|
||||
try {
|
||||
const updateInfo: UpdateInfo = JSON.parse(fs.readFileSync(Updater.updateMetadataPath, 'utf8'));
|
||||
const extractionDir = process.platform === 'darwin' ? 'FCast Receiver.app' : `fcast-receiver-${process.platform}-${process.arch}`;
|
||||
const binaryName = process.platform === 'win32' ? 'fcast-receiver.exe' : 'fcast-receiver';
|
||||
const installBinPath = process.platform === 'darwin' ? updateInfo.installPath : path.join(updateInfo.installPath, binaryName);
|
||||
|
||||
switch (updateInfo.updateState) {
|
||||
case UpdateState.Copy: {
|
||||
try {
|
||||
logger.info('Updater process started...');
|
||||
const src = path.join(updateInfo.tempPath, extractionDir);
|
||||
logger.info(`Copying files from update directory ${src} to install directory ${updateInfo.installPath}`);
|
||||
|
||||
await Updater.applyUpdate(src, updateInfo.installPath);
|
||||
updateInfo.updateState = UpdateState.Cleanup;
|
||||
fs.writeFileSync(Updater.updateMetadataPath, JSON.stringify(updateInfo));
|
||||
}
|
||||
catch (err) {
|
||||
logger.error('Error while applying update...');
|
||||
logger.error(err);
|
||||
|
||||
updateInfo.updateState = UpdateState.Error;
|
||||
updateInfo.error = JSON.stringify(err);
|
||||
fs.writeFileSync(Updater.updateMetadataPath, JSON.stringify(updateInfo));
|
||||
}
|
||||
|
||||
Updater.relaunch(installBinPath);
|
||||
return;
|
||||
}
|
||||
|
||||
case UpdateState.Cleanup: {
|
||||
try {
|
||||
logger.info('Performing update cleanup...')
|
||||
// Electron runtime sees .asar file as directory and causes errors during copy
|
||||
process.noAsar = true
|
||||
logger.info(`rm dir ${path.join(Updater.updateDataPath, extractionDir)}`)
|
||||
fs.rmSync(path.join(Updater.updateDataPath, extractionDir), { recursive: true, force: true });
|
||||
process.noAsar = false
|
||||
|
||||
fs.rmSync(path.join(Updater.updateDataPath, updateInfo.downloadFile));
|
||||
fs.rmSync(Updater.updateMetadataPath);
|
||||
|
||||
// Removing the install directory causes an 'ENOENT: no such file or directory, uv_cwd' error when calling process.cwd()
// Need to fix the working directory to the update directory that overwrote the install directory
|
||||
process.chdir(Updater.installPath);
|
||||
}
|
||||
catch (err) {
|
||||
logger.error('Error while performing update cleanup...');
|
||||
logger.error(err);
|
||||
|
||||
updateInfo.updateState = UpdateState.Error;
|
||||
updateInfo.error = JSON.stringify(err);
|
||||
fs.writeFileSync(Updater.updateMetadataPath, JSON.stringify(updateInfo));
|
||||
}
|
||||
|
||||
Updater.relaunch(installBinPath);
|
||||
return;
|
||||
}
|
||||
|
||||
case UpdateState.Error:
|
||||
logger.warn(`Update operation did not complete successfully: ${updateInfo.error}`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
logger.warn(`Error reading update metadata file, ignoring pending update: ${err}`);
|
||||
}
|
||||
}
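A hedged summary of the two-phase flow that processUpdate() drives from update.json: Copy runs from the binary extracted into the updater temp directory and overwrites the install directory, Cleanup runs from the freshly installed binary and removes the temp files plus update.json, and a failure records the Error state so the next start surfaces a dialog instead of retrying. The type below is purely illustrative, not part of this commit.

type UpdatePhase =
    | { state: UpdateState.Copy, runsFrom: 'tempPath', next: UpdateState.Cleanup | UpdateState.Error }
    | { state: UpdateState.Cleanup, runsFrom: 'installPath', next: 'normal start' | UpdateState.Error };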
|
||||
|
||||
public static async checkForUpdates(): Promise<boolean> {
|
||||
logger.info('Checking for updates...');
|
||||
Updater.localPackageJson = JSON.parse(fs.readFileSync(path.join(Updater.appPath, './package.json'), 'utf8'));
|
||||
|
||||
try {
|
||||
Updater.releasesJson = await Updater.fetchJSON(`${Updater.baseUrl}/releases_v${Updater.supportedReleasesJsonVersion}.json`.toString()) as ReleaseInfo;
|
||||
|
||||
let updaterSettings = Store.get('updater');
|
||||
if (updaterSettings === null) {
|
||||
updaterSettings = {
|
||||
'channel': Updater.localPackageJson.channel,
|
||||
}
|
||||
|
||||
Store.set('updater', updaterSettings);
|
||||
}
|
||||
else {
|
||||
Updater.localPackageJson.channel = updaterSettings.channel;
|
||||
}
|
||||
|
||||
return true;
|
||||
const localChannelVersion: number = Updater.localPackageJson.channelVersion ? Updater.localPackageJson.channelVersion : 0;
|
||||
const currentChannelVersion: number = Updater.releasesJson.channelCurrentVersions[Updater.localPackageJson.channel] ? Updater.releasesJson.channelCurrentVersions[Updater.localPackageJson.channel] : 0;
|
||||
logger.info('Update check', { channel: Updater.localPackageJson.channel, channel_version: localChannelVersion, localVersion: Updater.localPackageJson.version,
|
||||
currentVersion: Updater.releasesJson.currentVersion, currentChannelVersion: currentChannelVersion });
|
||||
|
||||
if (Updater.localPackageJson.version !== Updater.releasesJson.currentVersion || (Updater.localPackageJson.channel !== 'stable' && localChannelVersion < currentChannelVersion)) {
|
||||
logger.info('Update available...');
|
||||
return true;
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
logger.error(`Failed to check for updates: ${err}`);
|
||||
throw 'Please try again later or visit https://fcast.org for updates.';
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
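The update decision above boils down to one predicate; restated as a hedged standalone function with the same field names (the wrapper itself is not part of this commit).

function updateAvailable(local: { version: string, channel: string, channelVersion?: number },
                         remote: { currentVersion: string, channelCurrentVersions: { [channel: string]: number } }): boolean {
    const localChannelVersion = local.channelVersion ?? 0;
    const currentChannelVersion = remote.channelCurrentVersions[local.channel] ?? 0;
    // Stable installs update on any version change; other channels also update
    // when their channel counter advances.
    return local.version !== remote.currentVersion
        || (local.channel !== 'stable' && localChannelVersion < currentChannelVersion);
}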
|
||||
|
||||
public static async downloadUpdate(): Promise<boolean> {
|
||||
try {
|
||||
fs.accessSync(Updater.updateDataPath, fs.constants.F_OK);
|
||||
}
|
||||
catch (err) {
|
||||
logger.info(`Directory does not exist: ${err}`);
|
||||
fs.mkdirSync(Updater.updateDataPath);
|
||||
}
|
||||
|
||||
try {
|
||||
const channel = Updater.localPackageJson.version !== Updater.releasesJson.currentVersion ? 'stable' : Updater.localPackageJson.channel;
|
||||
const fileInfo = Updater.releasesJson.currentReleases[channel][process.platform][process.arch]
|
||||
const file = fileInfo.url.toString().split('/').pop();
|
||||
|
||||
const destination = path.join(Updater.updateDataPath, file);
|
||||
logger.info(`Downloading '${fileInfo.url}' to '${destination}'.`);
|
||||
Updater.isDownloading = true;
|
||||
await Updater.downloadFile(fileInfo.url.toString(), destination);
|
||||
|
||||
const downloadedFile = await fs.promises.readFile(destination);
|
||||
const hash = crypto.createHash('sha256').end(downloadedFile).digest('hex');
|
||||
if (fileInfo.sha256Digest !== hash) {
|
||||
const message = 'Update failed integrity check. Please try again later or visit https://fcast.org for updates.';
|
||||
logger.error(`Update failed integrity check. Expected hash: ${fileInfo.sha256Digest}, actual hash: ${hash}`);
|
||||
throw message;
|
||||
}
|
||||
|
||||
// Electron runtime sees .asar file as directory and causes errors during extraction
|
||||
logger.info('Extracting update...');
|
||||
process.noAsar = true;
|
||||
await extract(destination, { dir: path.dirname(destination) });
|
||||
process.noAsar = false;
|
||||
|
||||
logger.info('Extraction complete.');
|
||||
const updateInfo: UpdateInfo = {
|
||||
updateState: UpdateState.Copy,
|
||||
installPath: Updater.installPath,
|
||||
tempPath: path.dirname(destination),
|
||||
currentVersion: Updater.releasesJson.currentVersion,
|
||||
downloadFile: file,
|
||||
};
|
||||
|
||||
fs.writeFileSync(Updater.updateMetadataPath, JSON.stringify(updateInfo));
|
||||
logger.info('Written update metadata.');
|
||||
Updater.isDownloading = false;
|
||||
Updater.updateDownloaded = true;
|
||||
return true;
|
||||
}
|
||||
catch (err) {
|
||||
Updater.isDownloading = false;
|
||||
process.noAsar = false;
|
||||
logger.error(`Failed to download update: ${err}`);
|
||||
throw 'Failed to download update. Please try again later or visit https://fcast.org to download.';
|
||||
}
|
||||
}
|
||||
}
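The integrity check in downloadUpdate() can be isolated as below; a hedged sketch using crypto.update()/digest() rather than the stream form above, with the expected digest coming from releases_v1.json.

import * as crypto from 'crypto';
import * as fs from 'fs';

async function verifySha256(filePath: string, expectedHexDigest: string): Promise<boolean> {
    const data = await fs.promises.readFile(filePath);
    const actual = crypto.createHash('sha256').update(data).digest('hex');
    return actual === expectedHexDigest; // reject the download on mismatch
}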
|
||||
|
|
|
@ -8,7 +8,7 @@ export class WebSocketListenerService {
|
|||
public static PORT = 46898;
|
||||
|
||||
emitter = new EventEmitter();
|
||||
|
||||
|
||||
private server: WebSocketServer;
|
||||
private sessions: FCastSession[] = [];
|
||||
|
||||
|
@ -38,14 +38,14 @@ export class WebSocketListenerService {
|
|||
try {
|
||||
session.send(opcode, message);
|
||||
} catch (e) {
|
||||
console.warn("Failed to send error.", e);
|
||||
Main.logger.warn("Failed to send error.", e);
|
||||
session.close();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private async handleServerError(err: NodeJS.ErrnoException) {
|
||||
console.error("Server error:", err);
|
||||
Main.logger.error("Server error:", err);
|
||||
|
||||
const restartPrompt = await dialog.showMessageBox({
|
||||
type: 'error',
|
||||
|
@ -55,7 +55,7 @@ export class WebSocketListenerService {
|
|||
defaultId: 0,
|
||||
cancelId: 1
|
||||
});
|
||||
|
||||
|
||||
if (restartPrompt.response === 0) {
|
||||
Main.application.relaunch();
|
||||
Main.application.exit(0);
|
||||
|
@ -65,14 +65,14 @@ export class WebSocketListenerService {
|
|||
}
|
||||
|
||||
private handleConnection(socket: WebSocket) {
|
||||
console.log('New WebSocket connection');
|
||||
Main.logger.info('New WebSocket connection');
|
||||
|
||||
const session = new FCastSession(socket, (data) => socket.send(data));
|
||||
session.bindEvents(this.emitter);
|
||||
this.sessions.push(session);
|
||||
|
||||
socket.on("error", (err) => {
|
||||
console.warn(`Error.`, err);
|
||||
Main.logger.warn(`Error.`, err);
|
||||
session.close();
|
||||
});
|
||||
|
||||
|
@ -81,28 +81,28 @@ export class WebSocketListenerService {
|
|||
if (data instanceof Buffer) {
|
||||
session.processBytes(data);
|
||||
} else {
|
||||
console.warn("Received unhandled string message", data);
|
||||
Main.logger.warn("Received unhandled string message", data);
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn(`Error while handling packet.`, e);
|
||||
Main.logger.warn(`Error while handling packet.`, e);
|
||||
session.close();
|
||||
}
|
||||
});
|
||||
|
||||
socket.on("close", () => {
|
||||
console.log('WebSocket connection closed');
|
||||
Main.logger.info('WebSocket connection closed');
|
||||
|
||||
const index = this.sessions.indexOf(session);
|
||||
if (index != -1) {
|
||||
this.sessions.splice(index, 1);
|
||||
this.sessions.splice(index, 1);
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
console.log('Sending version');
|
||||
Main.logger.info('Sending version');
|
||||
session.send(Opcode.Version, {version: 2});
|
||||
} catch (e) {
|
||||
console.log('Failed to send version');
|
||||
Main.logger.info('Failed to send version');
|
||||
}
|
||||
}
|
||||
}
|
|
@ -6,6 +6,12 @@ ipcRenderer.on("device-info", (_event, value) => {
|
|||
})
|
||||
|
||||
contextBridge.exposeInMainWorld('electronAPI', {
|
||||
updaterProgress: () => ipcRenderer.invoke('updater-progress'),
|
||||
onDeviceInfo: (callback) => ipcRenderer.on("device-info", callback),
|
||||
onUpdateAvailable: (callback) => ipcRenderer.on("update-available", callback),
|
||||
sendDownloadRequest: () => ipcRenderer.send('send-download-request'),
|
||||
onDownloadComplete: (callback) => ipcRenderer.on("download-complete", callback),
|
||||
onDownloadFailed: (callback) => ipcRenderer.on("download-failed", callback),
|
||||
sendRestartRequest: () => ipcRenderer.send('send-restart-request'),
|
||||
getDeviceInfo: () => deviceInfo,
|
||||
});
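If renderer.js is ever typed, an ambient declaration for the bridge exposed above could look like this; a hedged sketch with signatures inferred from the preload code, not shipped in this commit.

export interface ElectronAPI {
    updaterProgress: () => Promise<number>;
    onDeviceInfo: (callback: (...args: unknown[]) => void) => void;
    onUpdateAvailable: (callback: () => void) => void;
    sendDownloadRequest: () => void;
    onDownloadComplete: (callback: () => void) => void;
    onDownloadFailed: (callback: () => void) => void;
    sendRestartRequest: () => void;
    getDeviceInfo: () => unknown;
}

declare global { interface Window { electronAPI: ElectronAPI; } }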
|
||||
|
|
|
@ -1,5 +1,15 @@
|
|||
import QRCode from 'qrcode';
|
||||
|
||||
const updateView = document.getElementById("update-view");
|
||||
const updateViewTitle = document.getElementById("update-view-title");
|
||||
const updateText = document.getElementById("update-text");
|
||||
const updateButton = document.getElementById("update-button");
|
||||
const restartButton = document.getElementById("restart-button");
|
||||
const updateLaterButton = document.getElementById("update-later-button");
|
||||
const progressBar = document.getElementById("progress-bar");
|
||||
const progressBarProgress = document.getElementById("progress-bar-progress");
|
||||
|
||||
let updaterProgressUIUpdateTimer = null;
|
||||
window.electronAPI.onDeviceInfo(renderIPsAndQRCode);
|
||||
|
||||
if(window.electronAPI.getDeviceInfo()) {
|
||||
|
@ -45,3 +55,57 @@ function renderIPsAndQRCode() {
|
|||
console.log(`Error rendering QR Code: ${e}`)
|
||||
});
|
||||
}
|
||||
|
||||
window.electronAPI.onUpdateAvailable(() => {
|
||||
console.log(`Received UpdateAvailable event`);
|
||||
updateViewTitle.textContent = 'FCast update available';
|
||||
|
||||
updateText.textContent = 'Do you wish to update now?';
|
||||
updateButton.setAttribute("style", "display: block");
|
||||
updateLaterButton.setAttribute("style", "display: block");
|
||||
restartButton.setAttribute("style", "display: none");
|
||||
progressBar.setAttribute("style", "display: none");
|
||||
updateView.setAttribute("style", "display: flex");
|
||||
});
|
||||
|
||||
window.electronAPI.onDownloadComplete(() => {
|
||||
console.log(`Received DownloadComplete event`);
|
||||
window.clearTimeout(updaterProgressUIUpdateTimer);
|
||||
updateViewTitle.textContent = 'FCast update ready';
|
||||
|
||||
updateText.textContent = 'Restart now to apply the changes?';
|
||||
updateButton.setAttribute("style", "display: none");
|
||||
progressBar.setAttribute("style", "display: none");
|
||||
restartButton.setAttribute("style", "display: block");
|
||||
updateLaterButton.setAttribute("style", "display: block");
|
||||
updateView.setAttribute("style", "display: flex");
|
||||
});
|
||||
|
||||
window.electronAPI.onDownloadFailed(() => {
|
||||
console.log(`Received DownloadFailed event`);
|
||||
window.clearTimeout(updaterProgressUIUpdateTimer);
|
||||
updateView.setAttribute("style", "display: none");
|
||||
});
|
||||
|
||||
updateLaterButton.onclick = () => { updateView.setAttribute("style", "display: none"); };
|
||||
updateButton.onclick = () => {
|
||||
updaterProgressUIUpdateTimer = window.setInterval( async () => {
|
||||
const updateProgress = await window.electronAPI.updaterProgress();
|
||||
|
||||
if (updateProgress >= 1.0) {
|
||||
updateText.textContent = "Preparing update...";
|
||||
progressBarProgress.setAttribute("style", `width: 100%`);
|
||||
}
|
||||
else {
|
||||
progressBarProgress.setAttribute("style", `width: ${Math.max(12, updateProgress * 100)}%`);
|
||||
}
|
||||
}, 500);
|
||||
|
||||
updateText.textContent = 'Downloading...';
|
||||
updateButton.setAttribute("style", "display: none");
|
||||
updateLaterButton.setAttribute("style", "display: none");
|
||||
progressBarProgress.setAttribute("style", "width: 12%");
|
||||
progressBar.setAttribute("style", "display: block");
|
||||
window.electronAPI.sendDownloadRequest();
|
||||
};
|
||||
restartButton.onclick = () => { window.electronAPI.sendRestartRequest(); };
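The 500 ms progress polling above could also be factored into a small helper; a hedged sketch (not part of this commit) that keeps the 12% minimum bar width used by renderer.js.

function pollDownloadProgress(getProgress: () => Promise<number>,
                              onTick: (widthPercent: number, done: boolean) => void): number {
    return window.setInterval(async () => {
        const p = await getProgress();
        onTick(Math.max(12, Math.min(1, p) * 100), p >= 1.0);
    }, 500);
}

// e.g. const timer = pollDownloadProgress(window.electronAPI.updaterProgress, updateBar);
//      then window.clearInterval(timer) once the download-complete event arrives.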
|
||||
|
|
|
@ -25,17 +25,25 @@
|
|||
<div id="spinner" class="lds-ring"><div></div><div></div><div></div><div></div></div>
|
||||
</div>
|
||||
|
||||
<!-- <div id="update-dialog">There is an update available. Do you wish to update?</div>
|
||||
<div id="update-button">Update</div>
|
||||
<div id="update-button">Later</div>
|
||||
<div id="progress-container">
|
||||
<div id="update-spinner" class="lds-ring"><div></div><div></div><div></div><div></div></div>
|
||||
<div id="progress-text"></div>
|
||||
</div> -->
|
||||
<div id="update-view" class="card">
|
||||
<div id="update-view-title" class="non-selectable card-title">FCast update available</div>
|
||||
<div class="card-title-separator"></div>
|
||||
|
||||
<div id="update-text">Do you wish to update now?</div>
|
||||
<div id="update-button-container">
|
||||
<div id="update-button" class="button button-primary">Update</div>
|
||||
<div id="restart-button" class="button button-primary">Restart</div>
|
||||
<div id="update-later-button" class="button button-secondary">Later</div>
|
||||
</div>
|
||||
|
||||
<div id="progress-bar">
|
||||
<div id="progress-bar-progress"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="detail-view">
|
||||
<div id="manual-connection-info" class="non-selectable">Manual connection information</div>
|
||||
<div id="manual-connection-info-separator"></div>
|
||||
<div id="detail-view" class="card">
|
||||
<div class="non-selectable card-title">Manual connection information</div>
|
||||
<div class="card-title-separator"></div>
|
||||
<div>
|
||||
<div id="ips">IPs</div><br />
|
||||
<div>Port<br>46899 (TCP), 46898 (WS)</div>
|
||||
|
|
|
@ -19,6 +19,70 @@ body, html {
|
|||
user-select: none;
|
||||
}
|
||||
|
||||
.card {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
text-align: center;
|
||||
|
||||
background-color: rgba(20, 20, 20, 0.5);
|
||||
padding: 25px;
|
||||
border-radius: 10px;
|
||||
border: 1px solid #2E2E2E;
|
||||
scrollbar-width: thin;
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
.card-title {
|
||||
font-weight: 700;
|
||||
line-height: 24px;
|
||||
margin: 10px;
|
||||
}
|
||||
|
||||
.card-title-separator {
|
||||
height: 1px;
|
||||
background: #2E2E2E;
|
||||
margin-top: 3px;
|
||||
margin-bottom: 3px;
|
||||
}
|
||||
|
||||
.button {
|
||||
display: inline-block;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
padding: 16px;
|
||||
gap: 6px;
|
||||
flex: 1 0 0;
|
||||
border-radius: 6px;
|
||||
|
||||
margin: 20px 10px;
|
||||
cursor: pointer;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.button-primary {
|
||||
background: #008BD7;
|
||||
}
|
||||
|
||||
.button-primary:hover {
|
||||
background: #0D9DDF;
|
||||
}
|
||||
|
||||
.button-primary:active {
|
||||
background: #0069AA;
|
||||
}
|
||||
|
||||
.button-secondary {
|
||||
background: #3E3E3E;
|
||||
}
|
||||
|
||||
.button-secondary:hover {
|
||||
background: #555555;
|
||||
}
|
||||
|
||||
.button-secondary:active {
|
||||
background: #3E3E3E;
|
||||
}
|
||||
|
||||
#ui-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
|
@ -80,17 +144,6 @@ body, html {
|
|||
padding: 25px;
|
||||
}
|
||||
|
||||
#detail-view {
|
||||
text-align: center;
|
||||
|
||||
background-color: rgba(20, 20, 20, 0.5);
|
||||
padding: 25px;
|
||||
border-radius: 10px;
|
||||
border: 1px solid #2E2E2E;
|
||||
scrollbar-width: thin;
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
#manual-connection-info {
|
||||
font-weight: 700;
|
||||
line-height: 24px;
|
||||
|
@ -118,41 +171,59 @@ body, html {
|
|||
font-weight: bold;
|
||||
}
|
||||
|
||||
#update-dialog, #waiting-for-connection, #ips, #automatic-discovery {
|
||||
#waiting-for-connection, #ips, #automatic-discovery {
|
||||
margin-top: 20px;
|
||||
}
|
||||
|
||||
#update-text {
|
||||
margin-top: 20px;
|
||||
width: 320px;
|
||||
}
|
||||
|
||||
#update-view {
|
||||
display: none;
|
||||
}
|
||||
|
||||
#restart-button {
|
||||
display: none;
|
||||
}
|
||||
|
||||
#spinner {
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
#update-button {
|
||||
background: blue;
|
||||
padding: 10px 28px;
|
||||
margin-top: 20px;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
/* .button {
|
||||
display: inline-block;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
min-width: 100px;
|
||||
padding: 18px 16px;
|
||||
gap: 6px;
|
||||
flex: 1 0 0;
|
||||
border-radius: 6px;
|
||||
} */
|
||||
|
||||
|
||||
#progress-container {
|
||||
#update-button-container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
margin-top: 8px;
|
||||
flex-direction: row;
|
||||
}
|
||||
|
||||
#progress-text {
|
||||
margin-left: 8px;
|
||||
#progress-bar {
|
||||
display: none;
|
||||
|
||||
width: 320px;
|
||||
height: 40px;
|
||||
margin-top: 20px;
|
||||
border-radius: 50px;
|
||||
border: 1px solid #4E4E4E;
|
||||
background: linear-gradient(rgba(20, 20, 20, 0.5), rgba(80, 80, 80, 0.5));
|
||||
/* background-size: cover; */
|
||||
}
|
||||
|
||||
#progress-bar-progress {
|
||||
width: 12%;
|
||||
height: 40px;
|
||||
border-radius: 50px;
|
||||
background-image: linear-gradient(to bottom, #008BD7 35%, #0069AA);
|
||||
transition: width .6s ease;
|
||||
}
|
||||
|
||||
@keyframes progress-bar-stripes {
|
||||
from {
|
||||
background-position: 1rem 0;
|
||||
}
|
||||
to {
|
||||
background-position: 0 0;
|
||||
}
|
||||
}
|
||||
|
||||
#window-can-be-closed {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"target": "es5",
|
||||
"module": "commonjs",
|
||||
"target": "ES2022",
|
||||
"module": "ES2022",
|
||||
"moduleResolution": "node",
|
||||
"sourceMap": false,
|
||||
"emitDecoratorMetadata": true,
|
||||
|
|