mirror of https://github.com/EmulatorJS/EmulatorJS.git
synced 2026-02-06 11:17:36 +00:00

Merge e55b1495e7 into 50cb990a87
This commit is contained in: commit eb5ea5f79f

26  .github/workflows/eslint.yml  (vendored, new file)
@@ -0,0 +1,26 @@
name: ESLint

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  eslint:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install dependencies
        run: npm install

      - name: Run ESLint
        run: npx eslint .
3  .gitignore  (vendored)
@@ -10,7 +10,10 @@ data/cores/*
!data/cores/package.json
!data/cores/.npmignore
.DS_Store
.hintrc
.vscode/*
!.vscode/settings.json
!.vscode/extensions.json
*.tgz
dist/
jsdoc/
5  .vscode/extensions.json  (vendored, new file)
@@ -0,0 +1,5 @@
{
    "recommendations": [
        "dbaeumer.vscode-eslint"
    ]
}
8  .vscode/settings.json  (vendored, new file)
@@ -0,0 +1,8 @@
{
    "diffEditor.ignoreTrimWhitespace": false,
    "editor.formatOnPaste": false,
    "editor.formatOnSave": false,
    "editor.formatOnSaveMode": "modifications",
    "eslint.enable": true,
    "eslint.lintTask.enable": true
}
34  build.js
@@ -61,9 +61,9 @@ if (!build_type) {
Seven.add(`dist/${version}.7z`, './', {
$raw: ['-xr@dist/.ignore'],
$progress: true
}).on('progress', function (progress) {
}).on('progress', (progress) => {
progressData['7z'] = progress.percent;
}).on('end', function() {
}).on('end', () => {
progressData['7z'] = 100;

});
@@ -71,9 +71,9 @@ if (!build_type) {
Seven.add(`dist/${version}.zip`, './', {
$raw: ['-xr@dist/.ignore'],
$progress: true
}).on('progress', function (progress) {
}).on('progress', (progress) => {
progressData['zip'] = progress.percent;
}).on('end', function() {
}).on('end', () => {
progressData['zip'] = 100;
});
} else if (build_type !== "emulatorjs" && build_type !== "cores" && build_type !== "get-cores") {
@@ -99,14 +99,15 @@ if (!build_type) {
return JSON.parse(fs.readFileSync(coresJsonPath, 'utf8'));
};

if (build_type === "emulatorjs") {
console.log(`Current EmulatorJS Version: ${version}`);
removeLogo();
console.log("Ready to build EmulatorJS!");
} else if (build_type === "get-cores") {
const cores = await getCores();
console.log(JSON.stringify(cores.map(coreName => coreName.name)));
} else if (build_type === "cores") {
const main = async () => {
if (build_type === "emulatorjs") {
console.log(`Current EmulatorJS Version: ${version}`);
removeLogo();
console.log("Ready to build EmulatorJS!");
} else if (build_type === "get-cores") {
const cores = await getCores();
console.log(JSON.stringify(cores.map(coreName => coreName.name)));
} else if (build_type === "cores") {
console.log(`Current EmulatorJS Version: ${version}`);
console.log("Building cores...");
const allCores = await getCores();
@@ -177,7 +178,10 @@ if (!build_type) {
packageJson.dependencies[`@emulatorjs/core-${core}`] = "latest";
fs.writeFileSync(packagePath, JSON.stringify(packageJson, null, 4), 'utf8');
}
console.log("EmulatorJS cores built successfully!");
console.log("Ready to build EmulatorJS!");
}
console.log("EmulatorJS cores built successfully!");
console.log("Ready to build EmulatorJS!");
}
};

main();
}
@@ -4,6 +4,8 @@
"nipplejs.js",
"shaders.js",
"storage.js",
"utils.js",
"cache.js",
"gamepad.js",
"GameManager.js",
"socket.io.min.js",
@@ -98,7 +100,7 @@
config.fullscreenOnLoad = window.EJS_fullscreenOnLoaded;
config.filePaths = window.EJS_paths;
config.loadState = window.EJS_loadStateURL;
config.cacheLimit = window.EJS_CacheLimit;
config.cacheConfig = window.EJS_cacheConfig;
config.cheats = window.EJS_cheats;
config.defaultOptions = window.EJS_defaultOptions;
config.gamePatchUrl = window.EJS_gamePatchUrl;
@@ -118,7 +120,6 @@
config.externalFiles = window.EJS_externalFiles;
config.dontExtractRom = window.EJS_dontExtractRom;
config.dontExtractBIOS = window.EJS_dontExtractBIOS;
config.disableDatabases = window.EJS_disableDatabases;
config.disableLocalStorage = window.EJS_disableLocalStorage;
config.forceLegacyCores = window.EJS_forceLegacyCores;
config.noAutoFocus = window.EJS_noAutoFocus;
@@ -87,34 +87,29 @@ class EJS_GameManager {
return new Promise(async (resolve, reject) => {
if (this.EJS.config.externalFiles && this.EJS.config.externalFiles.constructor.name === "Object") {
for (const key in this.EJS.config.externalFiles) {
await new Promise(done => {
this.EJS.downloadFile(this.EJS.config.externalFiles[key], null, true, { responseType: "arraybuffer", method: "GET" }).then(async (res) => {
if (res === -1) {
if (this.EJS.debug) console.warn("Failed to fetch file from '" + this.EJS.config.externalFiles[key] + "'. Make sure the file exists.");
return done();
}
await new Promise(async (done) => {
try {
const url = this.EJS.config.externalFiles[key];
const cacheItem = await this.EJS.downloadFile(url, this.EJS.downloadType.support.name, "GET", {}, null, null, null, 30000, "arraybuffer", false, this.EJS.downloadType.support.dontCache);

let path = key;
if (key.trim().endsWith("/")) {
const invalidCharacters = /[#<$+%>!`&*'|{}/\\?"=@:^\r\n]/ig;
let name = this.EJS.config.externalFiles[key].split("/").pop().split("#")[0].split("?")[0].replace(invalidCharacters, "").trim();
if (!name) return done();
const files = await this.EJS.checkCompression(new Uint8Array(res.data), this.EJS.localization("Decompress Game Assets"));
if (files["!!notCompressedData"]) {
path += name;
} else {
for (const k in files) {
this.writeFile(path + k, files[k]);
}
return done();
// Extract to directory
for (let i = 0; i < cacheItem.files.length; i++) {
const file = cacheItem.files[i];
this.writeFile(path + file.filename, file.bytes);
}
} else {
// Write single file (or first file from archive)
if (cacheItem.files.length > 0) {
this.writeFile(path, cacheItem.files[0].bytes);
}
}
try {
this.writeFile(path, new Uint8Array(res.data));
} catch(e) {
if (this.EJS.debug) console.warn("Failed to write file to '" + path + "'. Make sure there are no conflicting files.");
}
done();
});
} catch (e) {
if (this.EJS.debug) console.warn("Failed to fetch file from '" + this.EJS.config.externalFiles[key] + "'. Make sure the file exists.", e);
done();
}
})
}
}
@@ -350,35 +345,36 @@ IF EXIST AUTORUN.BAT AUTORUN.BAT
return (fileNames.length === 1) ? baseFileName + "-0.cue" : baseFileName + ".m3u";
}
loadPpssppAssets() {
return new Promise(resolve => {
this.EJS.downloadFile("cores/ppsspp-assets.zip", null, false, { responseType: "arraybuffer", method: "GET" }).then((res) => {
this.EJS.checkCompression(new Uint8Array(res.data), this.EJS.localization("Decompress Game Data")).then((pspassets) => {
if (pspassets === -1) {
this.EJS.textElem.innerText = this.localization("Network Error");
this.EJS.textElem.style.color = "red";
return;
}
this.mkdir("/PPSSPP");
return new Promise(async (resolve, reject) => {
try {
const cacheItem = await this.EJS.downloader.downloadFile("data/cores/ppsspp-assets.zip", this.EJS.downloadType.core.name, "GET", {}, null, null, null, 30000, "arraybuffer", false, this.EJS.downloadType.core.dontCache);

for (const file in pspassets) {
const data = pspassets[file];
const path = "/PPSSPP/" + file;
const paths = path.split("/");
let cp = "";
for (let i = 0; i < paths.length - 1; i++) {
if (paths[i] === "") continue;
cp += "/" + paths[i];
if (!this.FS.analyzePath(cp).exists) {
this.FS.mkdir(cp);
}
}
if (!path.endsWith("/")) {
this.FS.writeFile(path, data);
console.log(cacheItem);

this.mkdir("/PPSSPP");

for (let i = 0; i < cacheItem.files.length; i++) {
const file = cacheItem.files[i];
const path = "/PPSSPP/" + file.filename;
const paths = path.split("/");
let cp = "";
for (let j = 0; j < paths.length - 1; j++) {
if (paths[j] === "") continue;
cp += "/" + paths[j];
if (!this.FS.analyzePath(cp).exists) {
this.FS.mkdir(cp);
}
}
resolve();
})
});
if (!path.endsWith("/")) {
this.FS.writeFile(path, file.bytes);
}
}
resolve();
} catch (error) {
this.EJS.textElem.innerText = this.EJS.localization("Network Error");
this.EJS.textElem.style.color = "red";
reject(error);
}
})
}
setVSync(enabled) {
@@ -467,6 +463,24 @@ IF EXIST AUTORUN.BAT AUTORUN.BAT
setAltKeyEnabled(enabled) {
this.functions.setKeyboardEnabled(enabled === true ? 3 : 2);
}
listDir(path, indent = "") {
try {
const entries = this.FS.readdir(path);
for (const entry of entries) {
if (entry === "." || entry === "..") continue;
const fullPath = path === "/" ? `/${entry}` : `${path}/${entry}`;
const stat = this.FS.stat(fullPath);
if (this.FS.isDir(stat.mode)) {
console.log(`${indent}[DIR] ${fullPath}`);
this.listDir(fullPath, indent + "  ");
} else {
console.log(`${indent}${fullPath}`);
}
}
} catch (e) {
console.warn("Error reading directory:", path, e);
}
}
}

window.EJS_GameManager = EJS_GameManager;
635  data/src/cache.js  (new file)
@@ -0,0 +1,635 @@
/**
 * EJS Download Manager
 * Downloads files from a given URL when a download is requested.
 * The file is checked against the cache to avoid re-downloading files unnecessarily.
 * The following rules are tested when checking for an update:
 * 1. The URL is checked against the cache - if it doesn't exist, download it
 * 2. The cacheExpiry property is checked - if it exists and is in the future, use the cached version. Note: the cacheExpiry property is sent by the server in the Cache-Control or Expires headers. If these headers are not present, the cacheExpiry property will be set to 5 days in the future by default.
 * 3. If the cacheExpiry property is in the past or doesn't exist, a HEAD request is made to check the Last-Modified header against the cached version's added date. Falling back to downloading if Last-Modified is not present.
 * 4. If the Last-Modified date is newer than the cached version's added date, download the new version.
 * 5. If none of the above conditions are met, use the cached version.
 */
class EJS_Download {
    /**
     * Creates an instance of EJS_Download.
     * @param {EJS_Cache} storageCache - The cache instance to use for storing downloaded files.
     * @param {Object} EJS - The main EmulatorJS instance.
     */
    constructor(storageCache = null, EJS = null) {
        this.storageCache = storageCache;
        this.EJS = EJS;
    }

    /**
     * Handles downloading non-http(s) URLs (blob:, data:, file:, etc.)
     * @param {string} url - The non-http(s) URL to fetch
     * @param {string} type - The type of the file
     * @param {string} method - The HTTP method (HEAD returns empty, others fetch)
     * @param {string} responseType - The response type ("arraybuffer" or "text")
     * @returns {Promise<EJS_CacheItem|null>} - The fetched data as a cache item, or null for HEAD requests
     */
    async handleNonHttpUrl(url, type, method = "GET", responseType = "arraybuffer") {
        console.log("[EJS Download] Handling non-http(s) URL:", url);

        if (method === "HEAD") {
            // HEAD requests just return empty for non-http URLs
            return null;
        }

        try {
            let res = await fetch(url);
            let data;

            if (responseType === "arraybuffer" || !responseType) {
                data = await res.arrayBuffer();
                data = new Uint8Array(data);
            } else {
                data = await res.text();
                // Try to parse as JSON if it looks like JSON
                try { data = JSON.parse(data) } catch(e) {}
            }

            // Clean up blob URLs to free memory
            if (url.startsWith("blob:")) {
                URL.revokeObjectURL(url);
            }

            // Create a cache item for consistency
            const filename = url.split("/").pop() || "downloaded.bin";
            const now = Date.now();

            // Ensure data is Uint8Array for file item
            let fileData;
            if (data instanceof Uint8Array) {
                fileData = data;
            } else if (typeof data === "string") {
                const encoder = new TextEncoder();
                fileData = encoder.encode(data);
            } else if (data instanceof ArrayBuffer) {
                fileData = new Uint8Array(data);
            } else {
                const encoder = new TextEncoder();
                fileData = encoder.encode(String(data));
            }

            const files = [new EJS_FileItem(filename, fileData)];
            const key = this.storageCache ? this.storageCache.generateCacheKey(fileData) : "temp-" + Date.now();

            // Don't cache non-http URLs (they're typically temporary or special)
            return new EJS_CacheItem(key, files, now, type, responseType, filename, url, null);
        } catch(e) {
            console.error("[EJS Download] Failed to fetch non-http URL:", url, e);
            throw new Error(`Failed to fetch non-http URL: ${e}`);
        }
    }

    /**
     * Downloads a file from the given URL with the specified options.
     * Automatically detects and handles both http(s) and non-http(s) URLs (blob:, data:, etc.)
     * @param {string} url - The URL to download the file from.
     * @param {string} type - The type of the file to download (e.g. "ROM", "CORE", "BIOS", etc).
     * @param {string} method - The HTTP method to use (default is "GET").
     * @param {Array} headers - An array of headers to include in the request.
     * @param {*} body - The body of the request (for POST/PUT requests).
     * @param {*} onProgress - Callback function for progress updates - returns status (downloading or decompressing), percentage, loaded bytes, total bytes.
     * @param {*} onComplete - Callback function when download is complete - returns success boolean, response data or error message.
     * @param {Number} timeout - Timeout in milliseconds (default is 30000ms).
     * @param {string} responseType - The response type (default is "arraybuffer").
     * @param {boolean} forceExtract - Whether to force extraction of compressed files regardless of extension (default is false).
     * @param {boolean} dontCache - If true, the downloaded file will not be cached (default is false).
     * @returns {Promise<EJS_CacheItem>} - The downloaded file as an EJS_CacheItem.
     */
    downloadFile(url, type, method = "GET", headers = {}, body = null, onProgress = null, onComplete = null, timeout = 30000, responseType = "arraybuffer", forceExtract = false, dontCache = false) {
        let cacheActiveText = " (cache usage requested)"
        if (dontCache) {
            cacheActiveText = "";
        }
        console.log("[EJS Download] Downloading " + responseType + " file: " + url + cacheActiveText);
        return new Promise(async (resolve, reject) => {
            try {
                // Check if this is a non-http(s) URL (blob:, data:, file:, etc.)
                let urlObj;
                try { urlObj = new URL(url) } catch(e) {};

                if (urlObj && !["http:", "https:"].includes(urlObj.protocol)) {
                    // Handle non-http(s) URLs directly
                    const result = await this.handleNonHttpUrl(url, type, method, responseType);
                    resolve(result);
                    return;
                }

                // Use the provided storageCache or create a temporary one
                if (!this.storageCache) {
                    console.warn("No storageCache provided to EJS_Download, downloads will not be cached");
                }

                let cached = null;
                if (this.storageCache) {
                    cached = await this.storageCache.get(url, false, "url");
                }
                const now = Date.now();
                if (cached) {
                    if (cached.cacheExpiry && cached.cacheExpiry > now) {
                        if (this.debug) console.log("Using cached version of", url);
                        resolve(cached);
                        return;
                    }
                    let lastModified = null;
                    try {
                        const headResp = await fetch(url, { method: "HEAD", headers });
                        lastModified = headResp.headers.get("Last-Modified");
                    } catch (e) { }
                    if (lastModified) {
                        const lastModTime = Date.parse(lastModified);
                        if (!isNaN(lastModTime) && lastModTime <= cached.added) {
                            if (this.debug) console.log("Using cached version of", url);
                            resolve(cached);
                            return;
                        }
                    } else {
                        if (this.debug) console.log("Using cached version of", url);
                        resolve(cached);
                        return;
                    }
                }

                if (onProgress) onProgress("downloading", 0, 0, 0);
                let controller = new AbortController();
                let timer = setTimeout(() => controller.abort(), timeout);
                let resp, data, filename = url.split("/").pop() || "downloaded.bin";
                let cacheExpiry = null;
                try {
                    resp = await fetch(url, {
                        method,
                        headers,
                        body,
                        signal: controller.signal
                    });
                    clearTimeout(timer);
                    if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
                    const cd = resp.headers.get("Content-Disposition");
                    if (cd) {
                        const match = cd.match(/filename="?([^";]+)"?/);
                        if (match) filename = match[1];
                    }
                    const cacheControl = resp.headers.get("Cache-Control");
                    const expires = resp.headers.get("Expires");
                    if (cacheControl && /max-age=(\d+)/.test(cacheControl)) {
                        const maxAge = parseInt(cacheControl.match(/max-age=(\d+)/)[1]);
                        cacheExpiry = now + maxAge * 1000;
                    } else if (expires) {
                        const exp = Date.parse(expires);
                        if (!isNaN(exp)) cacheExpiry = exp;
                    } else {
                        // default to 5 days if no cache headers present
                        cacheExpiry = now + 5 * 24 * 60 * 60 * 1000;
                    }
                    if (responseType === "arraybuffer") {
                        let contentLength = 0;
                        if (resp.headers.get("Content-Length")) {
                            try {
                                const parsedContentLength = parseInt(resp.headers.get("Content-Length"));
                                if (!isNaN(parsedContentLength) && parsedContentLength > 0) {
                                    contentLength = parsedContentLength;
                                }
                            } catch (e) {
                                // swallow any errors parsing content length
                            }
                        }
                        const reader = resp.body.getReader();
                        let received = 0;
                        let chunks = [];
                        while (true) {
                            const { done, value } = await reader.read();
                            if (done) break;
                            chunks.push(value);
                            received += value.length;
                            if (onProgress && contentLength) {
                                onProgress("downloading", Math.floor(received / contentLength * 100), received, contentLength);
                            }
                        }

                        const blob = new Blob(chunks);
                        const ab = await blob.arrayBuffer();
                        data = new Uint8Array(ab);
                    } else {
                        data = await resp[responseType]();
                    }
                } catch (err) {
                    clearTimeout(timer);
                    reject(`Download failed: ${err}`);
                    return;
                }

                let files = [];
                const ext = filename.toLowerCase().split('.').pop();
                if (responseType === "arraybuffer") {
                    if (["zip", "7z", "rar"].includes(ext) || forceExtract) {
                        if (onProgress) onProgress("decompressing", 0, 0, 0);
                        try {
                            const compression = new window.EJS_COMPRESSION(this.EJS);
                            await compression.decompress(data, (msg, isProgress) => {
                                if (onProgress && isProgress) {
                                    const percent = parseInt(msg);
                                    onProgress("decompressing", isNaN(percent) ? 0 : percent, 0, 0);
                                }
                            }, (fname, fileData) => {
                                files.push(new EJS_FileItem(fname, fileData instanceof Uint8Array ? fileData : new Uint8Array(fileData)));
                            });
                        } catch (e) {
                            reject(`Decompression failed: ${e}`);
                            return;
                        }
                    } else {
                        files = [new EJS_FileItem(filename, data instanceof Uint8Array ? data : new Uint8Array(data))];
                    }
                } else {
                    // for non-arraybuffer types, just store the raw data as a single file
                    files = [new EJS_FileItem(filename, data)];
                    if (typeof data === "string") {
                        // Encode string as UTF-8 Uint8Array
                        const encoder = new TextEncoder();
                        files = [new EJS_FileItem(filename, encoder.encode(data))];
                    } else if (data instanceof Uint8Array) {
                        files = [new EJS_FileItem(filename, data)];
                    } else if (data instanceof ArrayBuffer) {
                        files = [new EJS_FileItem(filename, new Uint8Array(data))];
                    } else {
                        // Fallback: try to convert to string then encode
                        const encoder = new TextEncoder();
                        files = [new EJS_FileItem(filename, encoder.encode(String(data)))];
                    }
                }

                if (onProgress) onProgress("complete", 100, data.byteLength || 0, data.byteLength || 0);

                // Store in cache if available
                if (this.storageCache) {
                    const key = this.storageCache.generateCacheKey(files[0].bytes);
                    const cacheItem = new EJS_CacheItem(key, files, now, type, responseType, filename, url, cacheExpiry);
                    if (dontCache === false) {
                        await this.storageCache.put(cacheItem);
                    }
                    resolve(cacheItem);
                } else {
                    // Return a temporary cache item if no cache available
                    const key = "temp-" + Date.now();
                    const cacheItem = new EJS_CacheItem(key, files, now, type, responseType, filename, url, cacheExpiry);
                    resolve(cacheItem);
                }
            } catch (err) {
                reject(err.toString());
            }
        });
    }
}
/**
 * EJS_Cache
 * Manages a cache of files using IndexedDB for storage.
 */
class EJS_Cache {
    /**
     * Creates an instance of EJS_Cache.
     * @param {boolean} enabled - Whether caching is enabled.
     * @param {string} databaseName - Name of the IndexedDB database to use for caching.
     * @param {number} maxSizeMB - Maximum size of the cache in megabytes.
     * @param {number} maxAgeMins - Maximum age of items (in minutes) before they are cleaned up.
     */
    constructor(enabled = true, databaseName, maxSizeMB = 4096, maxAgeMins = 7200, debug = false) {
        this.enabled = enabled;
        this.databaseName = databaseName;
        this.maxSizeMB = maxSizeMB;
        this.maxAgeMins = maxAgeMins;
        this.minAgeMins = Math.max(60, maxAgeMins * 0.1); // Minimum 1 hour, or 10% of max age
        this.debug = debug;

        this.utils = new EJS_UTILS();

        /**
         * Indicates whether the startup cleanup has been completed.
         */
        this.startupCleanupCompleted = false;

        if (this.debug) {
            console.log("Initialized EJS_Cache with settings:", {
                enabled: this.enabled,
                databaseName: this.databaseName,
                maxSizeMB: this.maxSizeMB,
                maxAgeMins: this.maxAgeMins,
                minAgeMins: this.minAgeMins
            });
        }
    }

    /**
     * Initializes the IndexedDB database and object stores.
     * @returns {Promise<void>}
     */
    async createCacheDatabase() {
        if (!this.enabled) return;

        if (this.storage && this.blobStorage) return;

        return new Promise((resolve, reject) => {
            const indexes = ["type", "url"];
            const request = window.indexedDB.open(this.databaseName, 1);

            request.onupgradeneeded = (event) => {
                const db = event.target.result;
                // Create metadata stores
                const cacheStore = db.createObjectStore("cache");
                // Create indexes for cache store if indexes array is present
                if (Array.isArray(indexes)) {
                    for (const idx of indexes) {
                        if (!cacheStore.indexNames.contains(idx)) {
                            cacheStore.createIndex(idx, idx, { unique: false });
                        }
                    }
                }
                // Create blobs store
                db.createObjectStore("blobs");
            };

            request.onsuccess = (event) => {
                this.storage = new EJS_STORAGE(this.databaseName, "cache", indexes);
                this.blobStorage = new EJS_STORAGE(this.databaseName, "blobs");
                resolve();
            };

            request.onerror = (event) => {
                console.error("Error creating cache database:", event);
                reject(event);
            };
        });
    }

    /**
     * Generates a cache key for the given data array.
     * @param {Uint8Array} dataArray
     * @returns {string} The generated cache key.
     */
    generateCacheKey(dataArray) {
        let hash = this.utils.simpleHash(dataArray);
        const compressionCacheKey = "Obj-" + hash + "-" + dataArray.length;
        return compressionCacheKey;
    }

    /**
     * Retrieves an item from the cache.
     * @param {*} key - The unique key identifying the cached item.
     * @param {boolean} [metadataOnly=false] - If true, only retrieves metadata without file data.
     * @param {string|null} indexName - Optional index name to search by (e.g., 'url') - leave null to search by primary key.
     * @returns {Promise<EJS_CacheItem|null>} - The cached item or null if not found.
     */
    async get(key, metadataOnly = false, indexName = null) {
        if (!this.enabled) return null;

        // ensure database is created
        await this.createCacheDatabase();

        // clean up cache on first get if not already done
        if (!this.startupCleanupCompleted) {
            await this.cleanup();
            this.startupCleanupCompleted = true;
        }

        const item = await this.storage.get(key, indexName);
        // if the item exists, update its lastAccessed time and return cache item
        if (item) {
            item.lastAccessed = Date.now();
            await this.storage.put(item.key, item);

            if (!metadataOnly) {
                // get the blob from cache-blobs
                item.files = await this.blobStorage.get(item.key);
            }
        }

        if (item) {
            const cacheItem = new EJS_CacheItem(item.key, item.files, item.added, item.type, item.responseType, item.filename, item.url, item.cacheExpiry, item.lastAccessed);
            cacheItem.source = "cache";
            return cacheItem;
        }
        return null;
    }

    /**
     * Stores an item in the cache.
     * @param {EJS_CacheItem} item - The cache item to store.
     */
    async put(item) {
        if (!this.enabled) return;

        // ensure database is created
        await this.createCacheDatabase();

        // before putting, ensure item is of type EJS_CacheItem
        if (!(item instanceof EJS_CacheItem)) {
            throw new Error("Item must be an instance of EJS_CacheItem");
        }

        // check if the item exists, if so remove the existing item
        const existingItem = await this.get(item.key);
        if (existingItem) {
            await this.delete(item.key);
        }

        // add file size attribute
        item.fileSize = item.size();

        // check that the size of item.files does not cause the cache to exceed maxSizeMB
        let currentSize = 0;
        const allItems = await this.storage.getAll();
        for (let i = 0; i < allItems.length; i++) {
            if (allItems[i]) {
                currentSize += allItems[i].fileSize || 0;
            }
        }
        if ((currentSize + item.fileSize) > (this.maxSizeMB * 1024 * 1024)) {
            // exceeded max size, keep removing oldest items until we are under maxSizeMB + the size of the new item
            const itemsToRemove = [];
            let sizeToFree = (currentSize + item.fileSize) - (this.maxSizeMB * 1024 * 1024);
            for (let i = 0; i < allItems.length; i++) {
                if (allItems[i]) {
                    itemsToRemove.push({ item: allItems[i], size: allItems[i].fileSize || 0 });
                }
            }
            itemsToRemove.sort((a, b) => a.item.lastAccessed - b.item.lastAccessed); // oldest first
            for (let i = 0; i < itemsToRemove.length; i++) {
                if (sizeToFree <= 0) break;
                await this.delete(itemsToRemove[i].item.key);
                sizeToFree -= itemsToRemove[i].size;
            }
        }

        // store the metadata in cache
        await this.storage.put(item.key, {
            key: item.key,
            fileSize: item.fileSize,
            added: item.added,
            lastAccessed: item.lastAccessed,
            type: item.type,
            responseType: item.responseType,
            filename: item.filename,
            url: item.url,
            cacheExpiry: item.cacheExpiry
        });

        // store the files in cache-blobs
        await this.blobStorage.put(item.key, item.files);
    }

    /**
     * Deletes an item from the cache.
     * @param {string} key - The unique key identifying the cached item to delete.
     */
    async delete(key) {
        // ensure database is created
        await this.createCacheDatabase();

        // fail silently if the key does not exist
        try {
            await this.storage.remove(key);
            await this.blobStorage.remove(key);
        } catch (e) {
            console.error("Failed to delete cache item:", e);
        }
    }

    /**
     * Clears all items from the cache.
     */
    async clear() {
        // ensure database is created
        await this.createCacheDatabase();

        const allItems = await this.storage.getAll();
        for (let i = 0; i < allItems.length; i++) {
            await this.delete(allItems[i].key);
        }
    }

    /**
     * Cleans up the cache by removing old or excess items based on size and age constraints.
     */
    async cleanup() {
        if (!this.enabled) return;

        // ensure database is created
        await this.createCacheDatabase();

        if (this.debug) console.log("[EJS Cache] Starting cache cleanup...");
        const cleanupStartTime = performance.now();

        // get all items
        const allItems = await this.storage.getAll();
        const now = Date.now();

        // sort items by lastAccessed (oldest first)
        allItems.sort((a, b) => a.lastAccessed - b.lastAccessed);

        let currentSize = 0;
        let totalItems = allItems.length;
        const itemsToRemove = [];

        // Calculate current total size
        for (let i = 0; i < allItems.length; i++) {
            const item = allItems[i];
            const itemSize = item.fileSize || 0;
            currentSize += itemSize;
            const ageMins = (now - item.lastAccessed) / (1000 * 60);

            // Remove if too old OR if cache is over size limit and item is old enough
            if (ageMins > this.maxAgeMins || (currentSize > this.maxSizeMB * 1024 * 1024 && ageMins > this.minAgeMins)) {
                itemsToRemove.push({ key: item.key, size: itemSize, age: ageMins });
                currentSize -= itemSize;
            }
        }

        // remove items from storage
        for (const item of itemsToRemove) {
            await this.delete(item.key);
        }

        // remove orphaned blobs in blobStorage - here as a failsafe in case of previous incomplete deletions
        const blobKeys = await this.blobStorage.getKeys();
        for (const blobKey of blobKeys) {
            const existsInStorage = allItems.find(item => item.key === blobKey);
            if (!existsInStorage) {
                await this.blobStorage.remove(blobKey);
            }
        }

        const cleanupTime = performance.now() - cleanupStartTime;
        const currentSizeMB = (currentSize / (1024 * 1024)).toFixed(2);
        const removedSizeMB = (itemsToRemove.reduce((sum, item) => sum + item.size, 0) / (1024 * 1024)).toFixed(2);

        if (this.debug) console.log("[EJS Cache] Cleanup complete in " + cleanupTime.toFixed(2) + "ms - Removed " + itemsToRemove.length + "/" + totalItems + " items (" + removedSizeMB + "MB), " + currentSizeMB + "MB remaining");
    }
}
/**
 * EJS_CacheItem
 * Represents a single cached item in the EJS_Cache system.
 * Contains metadata about the cached item. This class is an internal structure used by EJS_Cache.
 */
class EJS_CacheItem {
    /**
     * Creates an instance of EJS_CacheItem.
     * @param {string} key - Unique identifier for the cached item.
     * @param {EJS_FileItem[]} files - array of EJS_FileItem objects representing the files associated with this cache item.
     * @param {number} added - Timestamp (in milliseconds) when the item was added to the cache.
     * @param {string} type - The type of cached content (e.g., 'core', 'ROM', 'BIOS', 'decompressed').
     * @param {string} responseType - The response type used when downloading the content (e.g., 'arraybuffer', 'blob', 'text').
     * @param {string} filename - The original filename of the cached content.
     * @param {string} url - The URL from which the cached content was downloaded.
     * @param {number|null} cacheExpiry - Timestamp (in milliseconds) indicating when the cache item should expire.
     */
    constructor(key, files, added, type = "unknown", responseType, filename, url, cacheExpiry) {
        this.key = key;
        this.files = files;
        this.added = added;
        this.lastAccessed = added;
        this.type = type;
        this.responseType = responseType;
        this.filename = filename;
        this.url = url;
        this.cacheExpiry = cacheExpiry;
    }

    /**
     * Calculates the total size of all files in this cache item.
     * @returns {number} - Total size in bytes.
     */
    size() {
        let total = 0;
        for (let i = 0; i < this.files.length; i++) {
            if (this.files[i] && this.files[i].bytes && typeof this.files[i].bytes.byteLength === "number") {
                total += this.files[i].bytes.byteLength;
            }
        }
        return total;
    }
}

/**
 * EJS_FileItem
 * Represents a single file stored in the cache. This class is an internal structure used by EJS_CacheItem.
 */
class EJS_FileItem {
    /**
     * Creates an instance of EJS_FileItem.
     * @param {string} filename - Name of the file.
     * @param {Uint8Array} bytes - Byte array representing the file's data.
     */
    constructor(filename, bytes) {
        this.filename = filename;
        this.bytes = bytes;
    }
}

window.EJS_Cache = EJS_Cache;
window.EJS_CacheItem = EJS_CacheItem;
window.EJS_FileItem = EJS_FileItem;
window.EJS_Download = EJS_Download;
@@ -87,13 +87,13 @@ class EJSCompression {
path = "compression/libunrar.js";
obj = "rar";
}
const res = await this.EJS.downloadFile(path, null, false, { responseType: "text", method: "GET" });
const res = await this.EJS.downloadFile(path, this.EJS.downloadType.support.name, null, false, { responseType: "text", method: "GET" }, false, this.EJS.downloadType.support.dontCache);
if (res === -1) {
this.EJS.startGameError(this.EJS.localization("Network Error"));
return;
}
if (method === "rar") {
const res2 = await this.EJS.downloadFile("compression/libunrar.wasm", null, false, { responseType: "arraybuffer", method: "GET" });
const res2 = await this.EJS.downloadFile("compression/libunrar.wasm", this.EJS.downloadType.support.name, null, false, { responseType: "arraybuffer", method: "GET" }, false, this.EJS.downloadType.support.dontCache);
if (res2 === -1) {
this.EJS.startGameError(this.EJS.localization("Network Error"));
return;
@@ -151,7 +151,7 @@ class EJSCompression {
})
resolve(blob);
} else {
const blob = new Blob([res.data], {
const blob = new Blob([res.data.files[0].bytes], {
type: "application/javascript"
})
resolve(blob);
1252  data/src/emulator.js  (file diff suppressed because it is too large)
@@ -1,7 +1,4 @@
class GamepadHandler {
gamepads;
timeout;
listeners;
constructor() {
this.buttonLabels = {
0: 'BUTTON_1',
@@ -1,7 +1,13 @@
class EJS_STORAGE {
constructor(dbName, storeName) {
/**
 * @param {string} dbName
 * @param {string} storeName
 * @param {string[]?} indexes - Optional array of field names to create non-unique indexes on
 */
constructor(dbName, storeName, indexes = null) {
this.dbName = dbName;
this.storeName = storeName;
this.indexes = indexes;
}
addFileToDB(key, add) {
(async () => {
@@ -17,73 +23,85 @@ class EJS_STORAGE {
this.put("?EJS_KEYS!", keys);
})();
}
get(key) {
getObjectStore(mode = "readwrite") {
return new Promise((resolve, reject) => {
if (!window.indexedDB) return resolve();
let openRequest = indexedDB.open(this.dbName, 1);
openRequest.onerror = () => resolve();
openRequest.onsuccess = () => {
let db = openRequest.result;
let transaction = db.transaction([this.storeName], "readwrite");
let transaction = db.transaction(this.storeName, mode);
let objectStore = transaction.objectStore(this.storeName);
let request = objectStore.get(key);
request.onsuccess = (e) => {
resolve(request.result);
};
request.onerror = () => resolve();
resolve(objectStore);
};
openRequest.onupgradeneeded = () => {
let db = openRequest.result;
let objectStore;
if (!db.objectStoreNames.contains(this.storeName)) {
db.createObjectStore(this.storeName);
};
objectStore = db.createObjectStore(this.storeName);
} else {
objectStore = openRequest.transaction.objectStore(this.storeName);
}
// Create indexes if provided
if (this.indexes && Array.isArray(this.indexes)) {
for (const idx of this.indexes) {
if (!objectStore.indexNames.contains(idx)) {
objectStore.createIndex(idx, idx, { unique: false });
}
}
}
};
});
}
put(key, data) {
return new Promise((resolve, reject) => {
if (!window.indexedDB) return resolve();
let openRequest = indexedDB.open(this.dbName, 1);
openRequest.onerror = () => {};
openRequest.onsuccess = () => {
let db = openRequest.result;
let transaction = db.transaction([this.storeName], "readwrite");
let objectStore = transaction.objectStore(this.storeName);
let request = objectStore.put(data, key);
/**
 * Get a value by key or by index.
 * @param {string|any} key - The key or index value to search for
 * @param {string|null} indexName - Optional index name to search by
 * @returns {Promise<any>}
 */
get(key, indexName = null) {
return new Promise(async (resolve, reject) => {
const objectStore = await this.getObjectStore();
if (!objectStore) return resolve();
if (!indexName) {
// Default: get by primary key
let request = objectStore.get(key);
request.onsuccess = () => resolve(request.result);
request.onerror = () => resolve();
request.onsuccess = () => {
this.addFileToDB(key, true);
} else {
// Get by index
try {
const index = objectStore.index(indexName);
let req = index.get(key);
req.onsuccess = () => resolve(req.result);
req.onerror = () => resolve();
} catch (e) {
// Index not found
resolve();
}
}
});
}
put(key, data) {
return new Promise(async (resolve, reject) => {
const objectStore = await this.getObjectStore();
if (!objectStore) return resolve();
let request = objectStore.put(data, key);
request.onerror = () => resolve();
request.onsuccess = () => {
this.addFileToDB(key, true);
resolve();
};
openRequest.onupgradeneeded = () => {
let db = openRequest.result;
if (!db.objectStoreNames.contains(this.storeName)) {
db.createObjectStore(this.storeName);
};
};
})
});
}
remove(key) {
return new Promise((resolve, reject) => {
if (!window.indexedDB) return resolve();
let openRequest = indexedDB.open(this.dbName, 1);
openRequest.onerror = () => {};
openRequest.onsuccess = () => {
let db = openRequest.result;
let transaction = db.transaction([this.storeName], "readwrite");
let objectStore = transaction.objectStore(this.storeName);
let request2 = objectStore.delete(key);
this.addFileToDB(key, false);
request2.onsuccess = () => resolve();
request2.onerror = () => {};
};
openRequest.onupgradeneeded = () => {
let db = openRequest.result;
if (!db.objectStoreNames.contains(this.storeName)) {
db.createObjectStore(this.storeName);
};
};
return new Promise(async (resolve, reject) => {
const objectStore = await this.getObjectStore();
if (!objectStore) return resolve();
let request = objectStore.delete(key);
this.addFileToDB(key, false);
request.onsuccess = () => resolve();
request.onerror = () => {};
});
}
getSizes() {
@@ -100,6 +118,28 @@ class EJS_STORAGE {
resolve(rv);
})
}
getAll() {
return new Promise(async (resolve, reject) => {
if (!window.indexedDB) return resolve([]);
const keys = await this.get("?EJS_KEYS!");
if (!keys) return resolve([]);
let rv = [];
for (let i = 0; i < keys.length; i++) {
const result = await this.get(keys[i]);
if (!result) continue;
rv.push(result);
}
resolve(rv);
});
}
getKeys() {
return new Promise(async (resolve, reject) => {
if (!window.indexedDB) return resolve([]);
const keys = await this.get("?EJS_KEYS!");
if (!keys) return resolve([]);
resolve(keys);
});
}
}

class EJS_DUMMYSTORAGE {
42  data/src/utils.js  (new file)
@@ -0,0 +1,42 @@
/**
 * EJS Utility Functions
 */
class EJS_UTILS {
    /**
     * Computes a simple hash of the given data array.
     * @param {Uint8Array} dataArray
     * @returns {number} The computed hash.
     */
    simpleHash(dataArray) {
        let hash = 0;
        for (let i = 0; i < dataArray.length; i++) {
            hash = ((hash << 5) - hash + dataArray[i]) & 0xffffffff;
        }
        return hash;
    }

    /**
     * Cyrb53 hash function adapted for buffers.
     * @param {*} charBuffer
     * @param {*} seed
     * @returns {string} Hexadecimal representation of the hash.
     */
    async cyrb53(charBuffer, seed = 0) {
        // https://stackoverflow.com/questions/7616461
        // Modified to accept a buffer instead of a string and return hex instead of an int
        let h1 = 0xdeadbeef ^ seed, h2 = 0x41c6ce57 ^ seed;
        for (let i = 0, ch; i < charBuffer.length; i++) {
            ch = charBuffer[i];
            h1 = Math.imul(h1 ^ ch, 2654435761);
            h2 = Math.imul(h2 ^ ch, 1597334677);
        }
        h1 = Math.imul(h1 ^ (h1 >>> 16), 2246822507);
        h1 ^= Math.imul(h2 ^ (h2 >>> 13), 3266489909);
        h2 = Math.imul(h2 ^ (h2 >>> 16), 2246822507);
        h2 ^= Math.imul(h1 ^ (h1 >>> 13), 3266489909);

        // Cyrb53 is a 53-bit hash; we need 14 hex characters to represent it, and the first char will
        // always be 0 or 1 (since it is only 1 bit)
        return (4294967296 * (2097151 & h2) + (h1 >>> 0)).toString(16).padStart(14, "0");
    };
}
416  docs/CACHING.md  (new file)
@@ -0,0 +1,416 @@
# EmulatorJS Caching System

## Overview

EmulatorJS implements a unified caching system that minimizes redundant downloads and decompression operations. The system uses IndexedDB-based storage with intelligent cache validation to provide fast loading times for emulator cores, ROMs, BIOS files, and other assets.

## High-Level Architecture

### Unified Caching Strategy

The caching system consists of two main components, wired together as sketched after this list:

1. **EJS_Download Manager**
   - Handles file downloads with smart caching
   - Validates cached content using HTTP headers (Cache-Control, Expires, Last-Modified)
   - Automatically decompresses archives (ZIP, 7Z, RAR)
   - Provides progress callbacks for download and decompression operations

2. **EJS_Cache Storage**
   - Custom IndexedDB-based storage for cached content
   - Stores both metadata and file blobs in separate object stores
   - Implements LRU (Least Recently Used) eviction policy
   - Configurable size limits and age-based expiration

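A minimal sketch of how the two pieces are wired together (constructor signatures as defined in `data/src/cache.js`; the database name and the surrounding `EJS` object are placeholders for illustration):

```javascript
// Hypothetical setup: create the cache, then hand it to the download manager.
const cache = new EJS_Cache(true, "EJS-example-cache", 4096, 7200, /* debug */ true);
const downloader = new EJS_Download(cache, EJS); // EJS = main EmulatorJS instance

// Downloads made through `downloader` are now checked against, and stored in, `cache`.
```
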
### Cache Flow

```
Download Request → URL-based Cache Check → Cache Expiry Valid?
                                              ↓                 ↓ Yes
                                      HEAD Request Check   Return Cached Content
                                              ↓ Modified
                                      Download File → Decompress (if needed) → Store in Cache
                                              ↓
                                      Return Content to Application
```

## Detailed Implementation

### Download Manager (EJS_Download)

The `EJS_Download` class handles all file downloads with intelligent caching:

#### Cache Validation Strategy

1. **Check URL-based cache**: Downloads are cached by URL, enabling reuse across sessions
2. **Validate cache expiry**:
   - Uses `Cache-Control: max-age` header from server
   - Falls back to `Expires` header if present
   - Defaults to 5 days if no cache headers provided
3. **Conditional validation**: If cache expired, performs HEAD request to check `Last-Modified`
4. **Smart fallback**: Uses cached version if HEAD request fails or file unchanged

#### Download Features

- **Progress tracking**: Real-time callbacks for download and decompression progress
- **Automatic decompression**: Detects and extracts ZIP, 7Z, and RAR archives
- **Flexible response types**: Supports arraybuffer, text, blob, and other response types
- **Timeout handling**: Configurable request timeouts with abort controller
- **Force extraction**: Optional parameter to force decompression regardless of file extension

### Cache Storage (EJS_Cache)

#### Storage Backend
- **Technology**: IndexedDB via custom `EJS_STORAGE` wrapper
- **Database**: Configurable database name (typically game-specific)
- **Object Stores**:
  - `cache`: Stores metadata (key, fileSize, timestamps, type, filename, URL, expiry)
  - `blobs`: Stores actual file data separately for better performance
- **Indexes**: Type and URL indexes for efficient lookups
- **Key Structure**: `Obj-{hash}-{size}` format for content-based keys, or URL for download caching

#### Cache Key Generation
```javascript
// Hash calculation for cache key (content-based)
generateCacheKey(dataArray) {
    let hash = this.utils.simpleHash(dataArray);
    const compressionCacheKey = "Obj-" + hash + "-" + dataArray.length;
    return compressionCacheKey;
}
```

#### Cache Configuration
- **Default Size Limit**: 4GB (4096 MB)
- **Default Max Age**: 5 days (7200 minutes)
- **Min Age**: 1 hour or 10% of max age (whichever is greater)
- **Storage Location**: Browser's IndexedDB
- **Cleanup Policy**:
  - LRU (Least Recently Used) eviction when size exceeded
  - Age-based removal for items older than maxAge
  - Automatic cleanup on first cache access per session
  - Orphaned blob cleanup as failsafe

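As a rough illustration of these limits, a smaller cache might be configured like this (a sketch; the database name is a placeholder, and the parameters follow the `EJS_Cache` constructor in `data/src/cache.js`):

```javascript
// Hypothetical: 512 MB cache whose items expire after one day (1440 minutes).
const smallCache = new EJS_Cache(true, "EJS-example-cache", 512, 1440);

// minAgeMins is derived automatically: max(60, 1440 * 0.1) = 144 minutes, so during
// cleanup() items younger than ~2.4 hours are not evicted purely for size reasons.
await smallCache.cleanup(); // the age/size-based sweep can also be triggered manually
```
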
#### Cache Item Structure
```javascript
class EJS_CacheItem {
    constructor(key, files, added, type, responseType, filename, url, cacheExpiry) {
        this.key = key;                   // Unique identifier
        this.files = files;               // Array of EJS_FileItem objects
        this.added = added;               // Timestamp when added to cache
        this.lastAccessed = added;        // Timestamp of last access
        this.type = type;                 // Type: 'core', 'ROM', 'BIOS', etc.
        this.responseType = responseType; // Original response type
        this.filename = filename;         // Original filename
        this.url = url;                   // Source URL
        this.cacheExpiry = cacheExpiry;   // Expiry timestamp (from HTTP headers)
    }

    size() {
        // Calculates total size of all files
    }
}

class EJS_FileItem {
    constructor(filename, bytes) {
        this.filename = filename; // Filename (original or from archive)
        this.bytes = bytes;       // Uint8Array of file content
    }
}
```

### File Type Handling

#### All File Types
The unified caching system handles all file types through the same mechanism:

- **Download**: Managed by `EJS_Download` with URL-based caching
- **Cache validation**: Automatic expiry checking and conditional requests
- **Decompression**: Automatic extraction of .zip, .7z, and .rar files
- **Storage**: Metadata and blobs stored in separate IndexedDB object stores
- **Retrieval**: URL or content-hash based lookups with index support

#### Archive Handling
- **Automatic detection**: File extension (.zip, .7z, .rar) triggers decompression
- **Force extraction**: `forceExtract` parameter overrides extension check
- **Multiple files**: Each file in archive stored as separate `EJS_FileItem`
- **Progress tracking**: Real-time progress callbacks during extraction

#### Response Type Support
- **arraybuffer**: Default, used for binary files and archives
- **text**: Automatically encoded to UTF-8 Uint8Array for storage
- **blob**: Converted to Uint8Array for consistent storage
- **others**: Converted to string then encoded for storage

### Cache Operations

#### Download with Caching
The `downloadFile()` method handles the complete download and caching workflow:

```javascript
async downloadFile(url, type, options) {
    // 1. Check cache by URL
    const cached = await this.storageCache.get(url, false, "url");

    // 2. Validate cache expiry
    if (cached && cached.cacheExpiry > Date.now()) {
        return cached; // Cache valid
    }

    // 3. Perform conditional validation with HEAD request
    const headResp = await fetch(url, { method: "HEAD" });
    const lastModified = headResp.headers.get("Last-Modified");
    if (lastModified && Date.parse(lastModified) <= cached.added) {
        return cached; // File not modified
    }

    // 4. Download file with progress tracking
    const response = await fetch(url);
    const data = await readWithProgress(response);

    // 5. Extract cache expiry from headers
    const cacheExpiry = parseCacheHeaders(response.headers);

    // 6. Decompress if needed
    if (isArchive(filename)) {
        files = await decompress(data);
    }

    // 7. Store in cache
    const cacheItem = new EJS_CacheItem(key, files, Date.now(),
        type, responseType, filename, url, cacheExpiry);
    await this.storageCache.put(cacheItem);

    return cacheItem;
}
```

#### Cache Management Operations

- **get(key, metadataOnly, indexName)**: Retrieve item by key or index (URL, type)
- **put(item)**: Store item with automatic size management and LRU eviction
- **delete(key)**: Remove item and its blobs from cache
- **clear()**: Remove all cached items
- **cleanup()**: Remove old/excess items based on age and size constraints
- **getKeys()**: List all cache keys
- **getSizes()**: Get size information for all cached items

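A brief usage sketch of these operations (the keys and URL are hypothetical; the methods are those declared on `EJS_Cache` in `data/src/cache.js`):

```javascript
// Hypothetical lookups against an existing EJS_Cache instance named `cache`.
const byUrl = await cache.get("https://example.com/cores/example-core.zip", false, "url"); // full item
const metaOnly = await cache.get("Obj-12345-2048", true);                                  // metadata only

if (byUrl) {
    console.log(byUrl.filename, byUrl.files.length, "file(s) cached");
}

await cache.delete("Obj-12345-2048"); // remove a single entry (metadata + blobs)
await cache.cleanup();                // age/size-based sweep
await cache.clear();                  // drop everything
```
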
### Performance Optimizations

#### Separate Metadata and Blob Storage
The cache uses two separate IndexedDB object stores:
- **Metadata store**: Small, fast lookups for cache hits/validation
- **Blob store**: Large file data loaded only when needed
- **Benefit**: Faster cache lookups without loading large blobs into memory

#### Progress Tracking
All long-running operations provide progress callbacks:
```javascript
downloadFile(url, type, {
    onProgress: (status, percent, loaded, total) => {
        // status: "downloading" | "decompressing" | "complete"
        console.log(`${status}: ${percent}%`);
    }
});
```

#### Memory Management
- **Chunked downloads**: Large files downloaded in chunks with progress tracking
- **LRU eviction**: Oldest accessed items removed first when size limit exceeded
- **Automatic cleanup**: Runs on first cache access each session
- **Orphan removal**: Failsafe cleanup removes blobs without metadata
- **Size-based eviction**: Pre-emptive removal when adding new items would exceed limit

### Cache Validation and Invalidation

#### Multi-Level Validation Strategy
The system uses a tiered approach to validate cached content:

1. **Cache Expiry Check** (fastest)
   - Checks `cacheExpiry` timestamp from HTTP headers
   - Avoids network requests for valid cached items
   - Default: 5 days when the server sends no Cache-Control/Expires headers

2. **Conditional HEAD Request** (medium)
   - Performed only if cache expired or no expiry set
   - Compares server's Last-Modified with cached item's `added` timestamp
   - Reuses cache if file unchanged on server

3. **Full Download** (slowest)
   - Only when file modified or validation fails
   - Replaces existing cache entry with new version

#### Client-Side Management
- **Automatic cleanup**: On first cache access per session
- **Manual clearing**: Via cache management UI
- **Selective removal**: Delete individual items by key
- **Index-based queries**: Find items by URL or type

### Error Handling

#### Network Failures
- **Graceful fallback**: Uses expired cache if server unavailable
- **Timeout handling**: Configurable request timeout with abort controller
- **Error propagation**: Clear error messages for debugging

#### Cache Failures
- **Silent degradation**: Cache disabled if IndexedDB unavailable
- **Orphan cleanup**: Removes blobs without metadata entries
- **Error recovery**: Falls back to fresh downloads when cache operations fail

#### Decompression Errors
- **Error propagation**: Clear error messages from decompression library
- **Fallback**: Returns original data if decompression fails
- **Progress updates**: Status callbacks during decompression

### Storage Architecture
|
||||
|
||||
#### Unified Database Design
|
||||
The current implementation uses a unified approach:
|
||||
- **Single database**: One IndexedDB database per game/instance
|
||||
- **Two object stores**:
|
||||
- `cache`: Metadata with indexed fields (type, url)
|
||||
- `blobs`: Large binary file data
|
||||
- **Benefits**:
|
||||
- Easier management and versioning
|
||||
- Atomic transactions across stores
|
||||
- Better performance than separate databases
|
||||
- Simpler cleanup and maintenance
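
A sketch of how such a database could be opened. The store and index names follow the list above; the surrounding upgrade logic is an assumption:

```javascript
// Illustrative: one database with a metadata store (indexed) and a blob store.
function openCacheDatabase(databaseName) {
    return new Promise((resolve, reject) => {
        const request = indexedDB.open(databaseName, 1);
        request.onupgradeneeded = () => {
            const db = request.result;
            const meta = db.createObjectStore("cache", { keyPath: "key" });
            meta.createIndex("type", "type", { unique: false });
            meta.createIndex("url", "url", { unique: false });
            db.createObjectStore("blobs", { keyPath: "key" });
        };
        request.onsuccess = () => resolve(request.result);
        request.onerror = () => reject(request.error);
    });
}
```

Because both stores live in one database, a single readwrite transaction can touch `cache` and `blobs` together, which is what makes the cross-store operations atomic.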

#### Browser Compatibility
- **IndexedDB Required**: Core functionality depends on IndexedDB support
- **Fallback mode**: `EJS_DUMMYSTORAGE` for browsers without IndexedDB
- **Feature detection**: Automatic detection and graceful degradation
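
Feature detection amounts to a simple check before enabling the cache. A sketch; the fallback wiring shown here is an assumption:

```javascript
// Illustrative: skip the cache entirely when IndexedDB is missing.
const cacheSupported = typeof window !== "undefined" && !!window.indexedDB;
const storage = cacheSupported
    ? new EJS_Cache(true, "ejs-my-game", 4096, 7200, false)
    : null; // passing null to EJS_Download disables caching, per the constructor notes below
```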

## Configuration Options

### EJS_Cache Constructor Parameters
```javascript
new EJS_Cache(enabled, databaseName, maxSizeMB, maxAgeMins, debug)
```
- `enabled`: Enable/disable caching (default: true)
- `databaseName`: IndexedDB database name (game-specific)
- `maxSizeMB`: Maximum cache size in megabytes (default: 4096)
- `maxAgeMins`: Maximum item age in minutes (default: 7200 = 5 days)
- `debug`: Enable debug logging (default: false)
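
For example, a smaller, shorter-lived cache might be configured like this (the database name and limits are arbitrary illustrations):

```javascript
// 1 GB size limit, entries expire after one day, debug logging off.
const cache = new EJS_Cache(true, "ejs-cache-sonic", 1024, 1440, false);
```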

### EJS_Download Constructor Parameters
```javascript
new EJS_Download(storageCache, EJS)
```
- `storageCache`: EJS_Cache instance for storage (null disables caching)
- `EJS`: Main EmulatorJS instance for integration

### downloadFile() Options
- `url`: Source URL (required)
- `type`: Content type (e.g., "ROM", "CORE", "BIOS")
- `method`: HTTP method (default: "GET")
- `headers`: Request headers object
- `body`: Request body for POST/PUT
- `onProgress`: Progress callback function
- `onComplete`: Completion callback function
- `timeout`: Request timeout in ms (default: 30000)
- `responseType`: Response type (default: "arraybuffer")
- `forceExtract`: Force decompression (default: false)
- `dontCache`: Skip caching for this download (default: false)
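
Putting the two constructors and the options together. This assumes `downloadFile()` is called on the `EJS_Download` instance, mirroring the earlier progress-tracking example; the URL and the `EJS` instance are placeholders:

```javascript
// Illustrative end-to-end usage; "window.EJS_instance" is a placeholder for the
// main EmulatorJS instance, not a guaranteed global.
const cache = new EJS_Cache(true, "ejs-my-game", 4096, 7200, false);
const downloader = new EJS_Download(cache, window.EJS_instance);

const rom = await downloader.downloadFile("roms/game.zip", "ROM", {
    timeout: 30000,
    responseType: "arraybuffer",
    forceExtract: true,              // decompress the archive before caching
    onProgress: (status, percent) => console.log(`${status}: ${percent}%`),
    onComplete: () => console.log("ROM ready")
});
```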

## Best Practices

### For Developers
1. **Set proper HTTP headers**: Use Cache-Control/Expires/Last-Modified on servers
2. **Monitor cache size**: Regular cleanup prevents storage quota issues
3. **Use progress callbacks**: Provide user feedback during long operations
4. **Handle failures gracefully**: Always provide fallback for cache failures
5. **Enable debug mode**: Use debug flag during development for detailed logging
6. **Test without cache**: Use `dontCache` option to verify download logic
7. **Optimize file delivery**: Smaller files and good compression improve cache efficiency

### For Server Configuration
1. **Cache-Control headers**: Set appropriate max-age for assets (see the sketch after this list)
2. **Last-Modified headers**: Enable conditional requests
3. **Compression**: Pre-compress large assets (gzip, brotli)
4. **CDN usage**: Leverage CDN caching alongside client-side cache
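
For instance, a static file server could emit the headers the cache relies on like this. A minimal Node.js sketch, not EmulatorJS code; paths and max-age values are arbitrary, and no path sanitisation is done:

```javascript
// Minimal Node.js static server sketch that sets the headers the client-side
// cache validates against (Cache-Control, Expires, Last-Modified).
import { createServer } from "node:http";
import { stat, readFile } from "node:fs/promises";
import { join } from "node:path";

const MAX_AGE_SECONDS = 5 * 24 * 60 * 60; // 5 days, matching the cache default

createServer(async (req, res) => {
    try {
        const filePath = join("./public", req.url === "/" ? "index.html" : req.url);
        const info = await stat(filePath);
        res.writeHead(200, {
            "Cache-Control": `public, max-age=${MAX_AGE_SECONDS}`,
            "Expires": new Date(Date.now() + MAX_AGE_SECONDS * 1000).toUTCString(),
            "Last-Modified": info.mtime.toUTCString()
        });
        res.end(await readFile(filePath));
    } catch {
        res.writeHead(404).end("Not found");
    }
}).listen(8080);
```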

### For Users
1. **Monitor storage usage**: Check browser storage settings periodically
2. **Clear cache if needed**: Use cache management UI for cleanup
3. **Update browser**: Newer browsers have better IndexedDB performance

## Troubleshooting

### Common Issues

#### Cache Not Working
- Check IndexedDB support: `!!window.indexedDB`
- Verify storage quota: Check browser storage settings
- Enable debug mode: Set `debug: true` in EJS_Cache constructor
- Check console: Look for cache operation logs
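
A quick diagnostic snippet for the first two checks, using standard browser APIs; paste it into the browser console:

```javascript
console.log("IndexedDB supported:", !!window.indexedDB);

if (navigator.storage && navigator.storage.estimate) {
    navigator.storage.estimate().then(({ usage, quota }) => {
        const usedMB = (usage / 1024 / 1024).toFixed(1);
        const quotaMB = (quota / 1024 / 1024).toFixed(1);
        console.log(`Storage: ${usedMB} MB used of ~${quotaMB} MB quota`);
    });
}
```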

#### Slow First Load
- Expected behavior: First load downloads and caches
- Subsequent loads: Should be much faster with cache hits
- Monitor progress: Use onProgress callback for user feedback

#### Storage Quota Exceeded
- Automatic cleanup: Cache runs cleanup on startup
- Manual cleanup: Use cache management UI
- Reduce maxSizeMB: Lower the cache size limit
- Clear other data: Free up browser storage space

#### Stale Content
- Check cache expiry: Default is 5 days
- Adjust maxAgeMins: Lower for more frequent updates
- Force refresh: Clear cache for specific items
- Server headers: Verify Cache-Control/Expires headers

### Debug Information

Enable debug mode for detailed logging:
```javascript
const cache = new EJS_Cache(true, "my-game", 4096, 7200, true);
```

Debug logs include:
- Cache initialization settings
- Startup cleanup results (items removed, size freed, time taken)
- Cache hit/miss information
- Download progress and timing
- Decompression progress
- Error details and stack traces

## Cache Manager UI

The Cache Manager provides a user interface for viewing and managing cached items. It displays information about cached content:

### Display Information
- **Filename**: Original filename of the cached content
- **Type**: Content type (ROM, CORE, BIOS, etc.)
- **Size**: Total size of all files in the cache item
- **URL**: Source URL where content was downloaded from
- **Last Accessed**: Timestamp of last access with relative time
- **Added**: Timestamp when item was first cached
- **Cache Expiry**: When the cache entry expires (from HTTP headers)

### Management Operations
- **View Details**: Inspect cache item metadata and file list
- **Delete Item**: Remove individual cache entries
- **Cleanup Now**: Run cleanup to remove old/excess items based on age and size
- **Clear All**: Remove all cached items from storage

### Cache Statistics
- **Total Items**: Number of cached items
- **Total Size**: Combined size of all cached content
- **Hit Rate**: Percentage of cache hits vs. misses (if tracked)
- **Oldest Item**: Age of least recently accessed item

## Summary

This unified caching system provides:
- **Intelligent cache validation** using HTTP headers (Cache-Control, Expires, Last-Modified)
- **Automatic decompression** and caching of extracted files
- **Efficient storage** with separate metadata and blob stores
- **LRU eviction** with configurable size and age limits
- **Progress tracking** for downloads and decompression
- **Graceful degradation** when cache unavailable
- **Debug logging** for troubleshooting

The system significantly improves EmulatorJS performance by eliminating redundant downloads and decompression operations while maintaining data freshness through smart validation.

23
eslint.config.js
Normal file
23
eslint.config.js
Normal file
@ -0,0 +1,23 @@
export default [
    {
        files: ["**/*.js"],
        languageOptions: {
            ecmaVersion: 2020,
            sourceType: "module",
            globals: {
                console: "readonly",
                window: "readonly",
                document: "readonly",
                navigator: "readonly",
                fetch: "readonly",
                localStorage: "readonly",
                sessionStorage: "readonly"
            }
        },
        rules: {
            "no-var": "warn",
            "prefer-const": "warn",
            "prefer-arrow-callback": "warn"
        }
    }
];
158
index.html
158
index.html
@ -1,9 +1,10 @@
<!DOCTYPE html>
<html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>EmulatorJS</title>
<link rel = icon href = docs/favicon.ico sizes = "16x16 32x32 48x48 64x64" type = image/vnd.microsoft.icon>
<meta name = viewport content = "width = device-width, initial-scale = 1">
<link rel="icon" href="docs/favicon.ico" sizes="16x16 32x32 48x48 64x64" type="image/vnd.microsoft.icon">
<meta name="viewport" content="width = device-width, initial-scale = 1">
<style>
body, html {
height: 100%;
@ -49,6 +50,35 @@
color: #ddd
}

#advanced {
color: #aaa;
text-align: left;
width: 30em;
max-width: 80%;
background-color: #333;
border-radius: 0.4em;
border: 2px solid #555;
position: relative;
flex-direction: column;
transition-duration: 0.2s;
overflow: hidden;
font-family: monospace;
font-weight: bold;
font-size: 20px;
margin: 5px;
padding: 10px;
}

#advancedOptionsBox {
margin-top: 10px;
font-size: 14px;
margin-left: 20px;
}

.advancedOptionsBoxRow {
margin-top: 10px;
}

#input {
cursor: pointer;
position: absolute;
@ -107,34 +137,132 @@
<input type="file" id ="input" title="Upload" />
Drag ROM file or click here
</div>
<div id="advanced">
<div id="advancedToggleBox">
<input type="checkbox" id="advancedToggle" />
<label for="advancedToggle"> Show Advanced Options</label>
</div>
<div id="advancedOptionsBox">
<div class="advancedOptionsBoxRow">
<input type="file" id ="inputAdvanced" title="BIOS" />
<label for="inputAdvanced"> Upload BIOS file (if required)</label>
</div>
<div class="advancedOptionsBoxRow">
<input type="checkbox" id="debugToggle" />
<label for="debugToggle"> Enable Debug Mode</label>
</div>
<div id="advancedOptionsBoxThreads" class="advancedOptionsBoxRow">
<input type="checkbox" id="threadsToggle" />
<label for="threadsToggle"> Enable Threads</label>
</div>
<div class="advancedOptionsBoxRow">
<span>Browser mode: </span>
<input type="radio" id="browserModeAuto" name="browserMode" value="auto">
<label for="browserModeAuto"> Auto </label>
<input type="radio" id="browserModeDesktop" name="browserMode" value="desktop">
<label for="browserModeDesktop"> Desktop </label>
<input type="radio" id="browserModeMobile" name="browserMode" value="mobile">
<label for="browserModeMobile"> Mobile </label>
</div>
</div>
</div>

<script>
let enableDebug = false;
let enableThreads = false;
let browserMode;

const biosFileInput = document.getElementById("inputAdvanced");

const advancedToggle = document.getElementById("advancedToggle");
const advanced = document.getElementById("advanced");
const advancedOptionsBox = document.getElementById("advancedOptionsBox");
advancedOptionsBox.style.display = "none";
advancedToggle.addEventListener("change", (event) => {
if (event.target.checked) {
advancedOptionsBox.style.display = "block";
} else {
advancedOptionsBox.style.display = "none";
}
});

const queryString = window.location.search;
const urlParams = new URLSearchParams(queryString);
const debugToggle = document.getElementById("debugToggle");
debugToggle.addEventListener("change", (event) => {
enableDebug = event.target.checked;
console.log(`Debug is ${enableDebug ? "enabled" : "disabled"}`);
});
const threadsToggleAvailable = document.getElementById("advancedOptionsBoxThreads");
const threadsToggle = document.getElementById("threadsToggle");
threadsToggle.addEventListener("change", (event) => {
enableThreads = event.target.checked;
console.log(`Threads are ${enableThreads ? "enabled" : "disabled"}`);
});
if (parseInt(urlParams.get("debug")) === 1 || urlParams.get("debug") === "true") {
enableDebug = true;
debugToggle.checked = true;
console.log("Debug is enabled");
} else {
debugToggle.checked = false;
console.log("Debug is disabled");
}

if (parseInt(urlParams.get("threads")) === 1 || urlParams.get("threads") === "true") {
if (window.SharedArrayBuffer) {
if (window.SharedArrayBuffer) {
threadsToggleAvailable.style.display = "block";
if (parseInt(urlParams.get("threads")) === 1 || urlParams.get("threads") === "true") {
enableThreads = true;
threadsToggle.checked = true;
console.log("Threads are enabled");
} else {
console.warn("Threads are disabled as SharedArrayBuffer is not available. Threads requires two headers to be set when sending you html page. See https://stackoverflow.com/a/68630724");
threadsToggle.checked = false;
console.log("Threads are disabled");
}
} else {
console.log("Threads are disabled");
threadsToggleAvailable.style.display = "none";
console.warn("Threads are disabled as SharedArrayBuffer is not available. Threads requires two headers to be set when sending you html page. See https://stackoverflow.com/a/68630724");
}

const browserModeAuto = document.getElementById("browserModeAuto");
browserModeAuto.addEventListener("change", (event) => {
if (event.target.checked) {
browserMode = undefined;
console.log("Browser mode set to auto");
}
});
const browserModeDesktop = document.getElementById("browserModeDesktop");
browserModeDesktop.addEventListener("change", (event) => {
if (event.target.checked) {
browserMode = "desktop";
console.log("Browser mode set to desktop");
}
});
const browserModeMobile = document.getElementById("browserModeMobile");
browserModeMobile.addEventListener("change", (event) => {
if (event.target.checked) {
browserMode = "mobile";
console.log("Browser mode set to mobile");
}
});
if (urlParams.get("browserMode")) {
browserMode = urlParams.get("browserMode");
switch (browserMode) {
case 1:
case "1":
case "mobile":
browserModeMobile.checked = true;
break;
case 2:
case "2":
case "desktop":
browserModeDesktop.checked = true;
break;
default:
browserModeAuto.checked = true;
break;
}
} else {
browserModeAuto.checked = true;
}

if (urlParams.get("rom")) {
@ -146,6 +274,8 @@
const url = upload ? input.files[0] : `roms/${file}`;
const parts = upload ? input.files[0].name.split(".") : file.split(".");

const biosUrl = biosFileInput.files.length > 0 ? biosFileInput.files[0] : "";

const core = await (async (ext) => {
if (["fds", "nes", "unif", "unf"].includes(ext))
return "nes"
@ -208,6 +338,7 @@
"Commodore VIC20": "vice_xvic",
"Commodore Plus/4": "vice_xplus4",
"Commodore PET": "vice_xpet",
"Amiga": "puae"
}

if (enableThreads) {
@ -258,23 +389,28 @@

const top = document.getElementById("top");
top.remove();
box.remove()
div.appendChild(sub)
box.remove();
div.appendChild(sub);
advanced.remove();
document.body.appendChild(div)

window.EJS_player = "#game";
window.EJS_gameName = parts.shift();
window.EJS_biosUrl = "";
window.EJS_biosUrl = biosUrl;
window.EJS_gameUrl = url;
window.EJS_core = core;
window.EJS_pathtodata = "data/";
window.EJS_startOnLoaded = true;
window.EJS_DEBUG_XX = enableDebug;
window.EJS_disableDatabases = true;
window.EJS_threads = enableThreads;
if (browserMode) {
window.EJS_browserMode = browserMode;
}
window.EJS_cacheConfig = {
enabled: true,
cacheMaxSizeMB: 4096, // 4096 MB = 4 GB
cacheMaxAgeMins: 7200 // 7200 minutes = 5 days
};

script.src = "data/loader.js";
document.body.appendChild(script);

6
jsconfig.json
Normal file
6
jsconfig.json
Normal file
@ -0,0 +1,6 @@
{
    "compilerOptions": {
        "module": "CommonJS",
        "target": "ES2020"
    }
}
@ -14,10 +14,10 @@ async function doMinify() {
input: path.join(rootPath, "data/src/*.js"),
output: path.join(rootPath, "data/emulator.min.js"),
})
.catch(function (err) {
.catch((err) => {
console.error(err);
})
.then(function() {
.then(() => {
console.log("Minified JS");
});
await minify({
@ -25,14 +25,16 @@
input: path.join(rootPath, "data/emulator.css"),
output: path.join(rootPath, "data/emulator.min.css"),
})
.catch(function (err) {
.catch((err) => {
console.error(err);
})
.then(function() {
.then(() => {
console.log("Minified CSS");
});
}

console.log("Minifying");
await doMinify();
console.log("Minifying Done!");
(async () => {
await doMinify();
console.log("Minifying Done!");
})();

@ -31,6 +31,7 @@
"@emulatorjs/cores": "latest"
},
"devDependencies": {
"eslint": "^9.39.2",
"jsdoc": "^4.0.4",
"nipplejs": "^0.10.2",
"node-fetch": "^3.3.2",

34
update.js
34
update.js
@ -116,20 +116,22 @@ const updateContributors = async () => {
console.log("Updated Contributors.md with new contributors.");
}

console.log(`Current EmulatorJS Version: ${version}`);
if (!update_version) {
console.warn("Warning: Version number not provided.");
} else {
console.log(`Updating EmulatorJS Version number to: ${update_version}`);
}
(async () => {
console.log(`Current EmulatorJS Version: ${version}`);
if (!update_version) {
console.warn("Warning: Version number not provided.");
} else {
console.log(`Updating EmulatorJS Version number to: ${update_version}`);
}

console.log("Updating EmulatorJS dependencies...");
if (depsArg) {
await updateDependencies();
}
if (update_version || dev === "false" || dev === "true") {
console.log("Updating EmulatorJS version...");
await updateVersion(update_version || version);
}
await updateContributors();
console.log("Updating EmulatorJS completed.");
console.log("Updating EmulatorJS dependencies...");
if (depsArg) {
await updateDependencies();
}
if (update_version || dev === "false" || dev === "true") {
console.log("Updating EmulatorJS version...");
await updateVersion(update_version || version);
}
await updateContributors();
console.log("Updating EmulatorJS completed.");
})();
