diff --git a/.eslintrc b/.eslintrc index 6862cb8c5..fcb1cad3f 100644 --- a/.eslintrc +++ b/.eslintrc @@ -44,7 +44,7 @@ "contexts": [ "ClassDeclaration", // TODO(cemmer): require private methods as well - "MethodDefinition[accessibility!=private][key.name!=/^(get|set)[A-Z][a-zA-Z]+/]" + "MethodDefinition[accessibility!=private][key.name!=/^(get|set|with)[A-Z][a-zA-Z]+/]" ] }], "jsdoc/require-param": "off", @@ -111,7 +111,7 @@ // TypeScript doesn't do a good job of reporting indexed values as potentially undefined, such as `[1,2,3][999]` "unicorn/prefer-at": "error", // Try to enforce early terminations of loops, rather than statements such as `.find(x=>x)[0]` - "unicorn/prefer-array-find": "error", + "unicorn/prefer-array-find": ["error", {"checkFromLast": false}], "unicorn/prefer-array-flat": "error", "unicorn/prefer-array-flat-map": "error", "unicorn/prefer-includes": "error", diff --git a/.gitattributes b/.gitattributes index 09c000ee3..500122eff 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,4 +1,4 @@ # Stop `core.autocrlf true` -*.lnx binary -*.nes binary -*.rom binary +test/fixtures/roms/** binary +*.cue text eol=lf +*.gdi text eol=crlf diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index c84e9b688..8961b68ff 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -2,7 +2,7 @@ Welcome! If you're viewing this, it means that you are likely interested in contributing to the project. That's marvelous! -The following is a set of guidelines for contributing to `igir`. These guidelines are published in order to clarify expectations and reduce potential conflict of opinions. +The following is a set of guidelines for contributing to Igir. These guidelines are published in order to clarify expectations and reduce potential conflict of opinions. 
## Feature requests & bug reports @@ -16,7 +16,7 @@ If you are experiencing an issues, please submit a detailed [bug report](https:/ [GitHub discussions](https://github.com/emmercm/igir/discussions) are a great tool for a number of topics: -- Getting help with `igir` CLI syntax or usage +- Getting help with Igir CLI syntax or usage - Clarifying support for specific features or scenarios - Brainstorming new feature requests - ...and more! @@ -27,13 +27,13 @@ Discussions are intended to be low-pressure spaces for questions and collaborati ### Environment setup -First, you will want to check out `igir`'s source code from GitHub: +First, you will want to check out Igir's source code from GitHub: ```shell git clone https://github.com/emmercm/igir.git ``` -`igir` is written in TypeScript for the Node.js runtime. The current version of Node.js that `igir` uses is defined under the `"volta"` object in the `package.json` file. After [installing](https://docs.volta.sh/guide/getting-started), Volta will make sure you're always using the correct Node.js version. +Igir is written in TypeScript for the Node.js runtime. The current version of Node.js that Igir uses is defined under the `"volta"` object in the `package.json` file. After [installing](https://docs.volta.sh/guide/getting-started), Volta will make sure you're always using the correct Node.js version. Third-party dependencies are managed and easily installed with npm: @@ -51,7 +51,7 @@ npm pack ### Running code -A script has been defined for the `npm start` command to easily run `igir`: +A script has been defined for the `npm start` command to easily run Igir: ```shell npm start -- [commands..] [options] @@ -65,25 +65,25 @@ npm start -- report --dat *.dat --input ROMs/ ### Code style -`igir` uses [ESLint](https://eslint.org/) as its linter and style enforcer. Rules have been specifically chosen to increase code consistency, safety, readability, and maintainability. 
+Igir uses [ESLint](https://eslint.org/) as its linter and style enforcer. Rules have been specifically chosen to increase code consistency, safety, readability, and maintainability. All code changes must pass the existing ESLint rules. Discussions on adding, removing, and changing ESLint rules should happen outside of pull requests that contain code changes, in their own dedicated pull request or discussion thread (above). ### Automated tests -`igir` uses [Jest](https://jestjs.io/) as its testing framework, and it uses [Codecov](https://about.codecov.io/) to ensure a minimum amount of test coverage. +Igir uses [Jest](https://jestjs.io/) as its testing framework, and it uses [Codecov](https://about.codecov.io/) to ensure a minimum amount of test coverage. All code changes must come with appropriate automated tests in order to prove correctness and to protect against future regressions. ### Docs -`igir` uses [MkDocs](https://www.mkdocs.org/) to turn Markdown files into a documentation website. +Igir uses [MkDocs](https://www.mkdocs.org/) to turn Markdown files into a documentation website. Appropriate updates must be made to all relevant documentation pages if functionality is added, removed, or changed. ### Git commit messages -`igir` is configured to squash-merge all pull requests, such that only the pull request title ends up in the commit history of the main branch. This means that individual commit messages are less important, and it puts more emphasis on quality pull request titles & descriptions. +Igir is configured to squash-merge all pull requests, such that only the pull request title ends up in the commit history of the main branch. This means that individual commit messages are less important, and it puts more emphasis on quality pull request titles & descriptions. That said, quality commit messages help future maintainers understand past intentions. Please use your best judgement on descriptive, clear, and concise commit messages. 
@@ -91,7 +91,7 @@ That said, quality commit messages help future maintainers understand past inten Here are steps that should be completed prior to submitting a pull request: -- [ ] Validate your change works as expected locally by running `igir` (not just the unit tests) +- [ ] Validate your change works as expected locally by running Igir (not just the unit tests) - [ ] Unit tests have been added to cover your change - [ ] `npm test` has been run locally for your change, to validate: - Your added & changed tests are passing @@ -113,8 +113,8 @@ To contribute code changes, you will need to: ## License -`igir` is licensed under [GNU General Public License v3.0](https://github.com/emmercm/igir/blob/main/LICENSE). +Igir is licensed under [GNU General Public License v3.0](https://github.com/emmercm/igir/blob/main/LICENSE). -โœ… That means that `igir` can be used for free commercially, can be modified, can be distributed, and can be used for private use. +โœ… That means that Igir can be used for free commercially, can be modified, can be distributed, and can be used for private use. โš ๏ธ But it also means that distribution of closed-source versions is _not_ allowed. diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml index ea69ce21d..f0632eed2 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.yml +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -15,7 +15,7 @@ body: attributes: label: Paste the command description: | - The exact `igir` command you ran when you experienced a bug. + The exact Igir command you ran when you experienced a bug. Include the full `node`, `npm`, or `npx` command when not running the standalone `igir` executable. validations: @@ -53,12 +53,12 @@ body: attributes: label: DAT(s) used description: | - Links to the DATs that were used for this `igir` command, if any. + Links to the DATs that were used for this Igir command, if any. 
- type: input attributes: label: igir version - description: What version of `igir` are you running? This is visible in the output header. + description: What version of Igir are you running? This is visible in the output header. validations: required: true @@ -68,7 +68,7 @@ body: description: | What version of Node.js are you running? This can be seen with the `node --version` command. - You can specify "N/A" when using a standalone version of `igir` (one downloaded from GitHub). + You can specify "N/A" when using a standalone version of Igir (one downloaded from GitHub). validations: required: true diff --git a/.github/workflows/codecov.yml b/.github/workflows/codecov.yml index 0be3cf904..96fee7b97 100644 --- a/.github/workflows/codecov.yml +++ b/.github/workflows/codecov.yml @@ -35,6 +35,9 @@ jobs: - uses: actions/checkout@v4 - uses: volta-cli/action@v4 - run: npm ci + - run: | + sudo apt-get update + sudo apt-get install -y libsdl2-2.0-0 libsdl2-ttf-2.0-0 # Run test coverage - run: npm run test:coverage diff --git a/.github/workflows/gh-automerge-rebase.yml b/.github/workflows/gh-automerge-rebase.yml index 7a64bc6f8..5cb9d1de2 100644 --- a/.github/workflows/gh-automerge-rebase.yml +++ b/.github/workflows/gh-automerge-rebase.yml @@ -12,6 +12,8 @@ on: push: branches: - 'main' + - '*feature*' + - '**/*feature*' schedule: # Every hour - cron: '0 * * * *' @@ -26,7 +28,7 @@ jobs: with: # GitHub won't run workflows off of code commits+pushes from the `github-actions` user token: ${{ secrets.PERSONAL_ACCESS_TOKEN }} - base: 'main' + base: ${{ github.head_ref || github.ref_name || 'main' }} required_approval_count: 0 require_passed_checks: false # Oldest pull request diff --git a/.github/workflows/gh-first-interaction.yml b/.github/workflows/gh-first-interaction.yml index 9cb03b78f..dda8cf39a 100644 --- a/.github/workflows/gh-first-interaction.yml +++ b/.github/workflows/gh-first-interaction.yml @@ -16,7 +16,7 @@ jobs: pr-message: | ## :wave: Welcome - Thank you for 
your first pull request, @${{ github.event.pull_request.head.user.login }}! If you haven't yet, please familiarize yourself with `igir`'s [contribution guidelines](https://github.com/emmercm/igir/blob/main/.github/CONTRIBUTING.md). + Thank you for your first pull request, @${{ github.event.pull_request.head.user.login }}! If you haven't yet, please familiarize yourself with Igir's [contribution guidelines](https://github.com/emmercm/igir/blob/main/.github/CONTRIBUTING.md). Some GitHub Actions CI workflows may not automatically run for you due to GitHub's [security best practices](https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#controlling-changes-from-forks-to-workflows-in-public-repositories), so a maintainer may need to manually approve the workflows to run. As a result, it is important to make sure tests pass locally before submitting a pull request to help ensure a fast review. Thank you! 
diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml index 6fc68100e..47f4de3ef 100644 --- a/.github/workflows/gh-pages.yml +++ b/.github/workflows/gh-pages.yml @@ -42,7 +42,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - run: docker run --rm --volume "${PWD}:/workdir" ghcr.io/igorshubovych/markdownlint-cli:latest --disable MD013 MD033 MD046 -- "**/*.md" + - run: docker run --rm --volume "${PWD}:/workdir" ghcr.io/igorshubovych/markdownlint-cli:latest --disable MD013 MD033 MD041 MD046 -- "**/*.md" build: needs: diff --git a/.github/workflows/node-test.yml b/.github/workflows/node-test.yml index 41fe28ce4..16573d494 100644 --- a/.github/workflows/node-test.yml +++ b/.github/workflows/node-test.yml @@ -69,6 +69,7 @@ jobs: - path-filter if: ${{ needs.path-filter.outputs.changes == 'true' }} runs-on: ${{ matrix.os }}-latest + timeout-minutes: 10 strategy: matrix: os: [ ubuntu, macos, windows ] @@ -82,6 +83,13 @@ jobs: with: node-version: ${{ matrix.node-version }} - run: npm ci + - if: ${{ matrix.os == 'macos' }} + run: brew install --overwrite sdl2 + - if: ${{ matrix.os == 'ubuntu' }} + run: | + sudo apt-get update + sudo apt-get install -y libsdl2-2.0-0 libsdl2-ttf-2.0-0 + # Test the source files - run: npm run test:unit @@ -90,6 +98,7 @@ jobs: - path-filter if: ${{ needs.path-filter.outputs.changes == 'true' }} runs-on: ubuntu-latest + timeout-minutes: 10 strategy: matrix: node-version: [ lts, 18, 16.7.0 ] @@ -102,6 +111,9 @@ jobs: with: node-version: ${{ matrix.node-version }} - run: npm ci + - run: | + sudo apt-get update + sudo apt-get install -y libsdl2-2.0-0 libsdl2-ttf-2.0-0 # Test the built files - run: npm run build - run: ./test/endToEndTest.sh diff --git a/.gitignore b/.gitignore index 89b5986ef..3d910f86d 100644 --- a/.gitignore +++ b/.gitignore @@ -108,7 +108,7 @@ dist dist/ site/ *.bat -*.cache +*.cache* *.sh !test/*.sh @@ -122,16 +122,21 @@ site/ *.bin *.cd1 *.cd2 +*.chd *.col +*.cso *.cue +*.dax *.dvd 
*.gb *.gba *.gbc +*.gcz *.gdi *.gg *.ic1 *.img +*.iso *.jar *.lo *.lyx @@ -142,17 +147,21 @@ site/ *.pce *.pk3 *.pup +*.raw *.rom +*.rvz *.sfc *.smc *.sms *.szx *.wad +*.wia *.x1 *.x1t *.zim *.z64 *.zip +*.zso # ROM pack excess *.bmp diff --git a/README.md b/README.md index 06487bd79..c7f2c12c9 100644 --- a/README.md +++ b/README.md @@ -1,44 +1,47 @@ -

๐Ÿ•น๏ธ igir

+
+ igir logo +
+
-

Pronounced "eager," igir is a video game ROM collection manager to help filter, sort, patch, archive, and report on collections on any OS.

+

Pronounced "eager," Igir is a zero-setup ROM collection manager that sorts, filters, extracts or archives, patches, and reports on collections of any size on any OS.

- CLI: Windows,macOS,Linux npm: version npm: downloads GitHub: downloads GitHub: stars + GitHub: discussions

Snyk: vulnerabilities codecov: coverage Code Climate: maintainability - license + license

See the project website for complete documentation, installation & usage instructions, and examples!


-## What does `igir` do? +## What does Igir do? A video of an example use case: -[![asciicast](https://asciinema.org/a/Sum1WBdZRsSTvbZvVuP5Ho1N9.svg)](https://asciinema.org/a/Sum1WBdZRsSTvbZvVuP5Ho1N9) +asciicast -With `igir` you can manage a ROM collection of any size: +With Igir you can manage a ROM collection of any size: -- ๐Ÿ” Scan for DATs, ROMs, and ROM patches - including those in archives (see [scanning](https://igir.io/input/file-scanning) & [archive docs](https://igir.io/input/reading-archives)) +- ๐Ÿ” Scan for DATs, ROMs, and ROM patchesโ€”including those in archives (see [scanning](https://igir.io/input/file-scanning) & [archive docs](https://igir.io/input/reading-archives)) - ๐Ÿ“‚ Organize ROM files by console (see [DAT docs](https://igir.io/dats/overview)) - ๐Ÿช„ Name ROM files consistently, including the right extension (see [DAT docs](https://igir.io/dats/overview)) - โœ‚๏ธ Filter out duplicate ROMs, or ROMs in languages you don't understand (see [filtering docs](https://igir.io/roms/filtering-preferences)) - ๐Ÿ—œ๏ธ Extract or archive ROMs in mass (see [archive docs](https://igir.io/output/writing-archives)) - ๐Ÿฉน Patch ROMs automatically in mass (see [scanning](https://igir.io/input/file-scanning) & [patching docs](https://igir.io/roms/patching)) -- ๐ŸŽฉ Parse ROMs with headers, and optionally remove them (see [header docs](https://igir.io/roms/headers)) +- ๐ŸŽฉ Parse ROMs with headers and optionally remove them (see [header docs](https://igir.io/roms/headers)) - โ†”๏ธ Build & re-build (un-merge, split, or merge) MAME ROM sets (see [arcade docs](https://igir.io/usage/arcade)) -- ๐Ÿ”ฎ Report on what ROMs are present or missing for each console, and create fixdats for missing ROMs (see [reporting](https://igir.io/output/reporting) & [DAT docs](https://igir.io/dats/overview)) +- ๐Ÿ”ฎ Report on what ROMs are present or missing for each console and create fixdats for missing ROMs (see [reporting](https://igir.io/output/reporting) & [DAT 
docs](https://igir.io/dats/overview)) -## How do I run `igir`? +## How do I run Igir? Either download the latest version for your OS from the [releases page](https://github.com/emmercm/igir/releases/latest), or if you have Node.js installed you can use [`npx`](https://docs.npmjs.com/cli/v9/commands/npx) to always run the latest version from the command line: diff --git a/codecov.yml b/codecov.yml index 3b7f31352..9f77df147 100644 --- a/codecov.yml +++ b/codecov.yml @@ -9,5 +9,5 @@ coverage: patch: off project: default: - target: 94% + target: 93% threshold: 1% diff --git a/docs/advanced/internals.md b/docs/advanced/internals.md index ce366f5ad..e7875180f 100644 --- a/docs/advanced/internals.md +++ b/docs/advanced/internals.md @@ -1,10 +1,10 @@ # Internal Operations -Information about the inner workings of `igir`. +Information about the inner workings of Igir. ## Order of operations -`igir` runs these steps in the following order: +Igir runs these steps in the following order: 1. Scan each DAT input path for every file and parse them, if provided (`--dat `) 2. Scan each ROM input path for every file (`--input `) @@ -24,6 +24,6 @@ Information about the inner workings of `igir`. - Written ROMs are tested for accuracy, if specified (`test`) - A "dir2dat" DAT is created, if specified (`dir2dat`) (see [dir2dat docs](../dats/dir2dat.md)) - A "fixdat" is created, if specified (`fixdat`) (see [fixdats docs](../dats/fixdats.md)) -5. "Moved" input ROMs are deleted (`move`) +5. Leftover "moved" input ROMs are deleted (`move`) 6. Unknown files are recycled from the output directory, if specified (`clean`, see [cleaning docs](../output/cleaning.md)) 7. 
An output report is written to the output directory, if specified (`report`, see [reporting docs](../output/reporting.md)) diff --git a/docs/advanced/logging.md b/docs/advanced/logging.md index e52800dd5..9eae7272e 100644 --- a/docs/advanced/logging.md +++ b/docs/advanced/logging.md @@ -1,6 +1,6 @@ # Logging -By default, `igir` will print the following log levels: +By default, Igir will print the following log levels: - `ERROR`: an unexpected error has prevented an intended [command](../commands.md) - `WARN`: a preventable error has prevented an intended [command](../commands.md) @@ -14,7 +14,7 @@ There are additional levels of verbosity that can be enabled with the `-v` flag: - Files being copied, zipped, and linked - [dir2dat](../dats/dir2dat.md) files being created - [Fixdat](../dats/fixdats.md) files being created - - Input files deleted after being moved + - Leftover input files deleted after being moved - Output files being [cleaned](../output/cleaning.md) (including files skipped due to `--clean-dry-run`) - [Report](../output/reporting.md) files being created @@ -40,7 +40,7 @@ There are additional levels of verbosity that can be enabled with the `-v` flag: igir [commands..] [options] -vv ``` - This level is helpful to turn on if you want debug why an action didn't take place. + This level is helpful to turn on if you want to debug why an action didn't take place. 
- **`TRACE` (`-vvv`): print information about actions taken, skipped, and additional information that can be helpful to debug issues.** diff --git a/docs/advanced/temp-dir.md b/docs/advanced/temp-dir.md index 0d8d89b08..511c99934 100644 --- a/docs/advanced/temp-dir.md +++ b/docs/advanced/temp-dir.md @@ -1,13 +1,13 @@ # Temp Directory -`igir` needs to write some temporary files to disk for a few reasons: +Igir needs to write some temporary files to disk for a few reasons: - Downloading [DAT URLs](../dats/processing.md#scanning-for-dats) to disk before parsing - Extracting [some archives](../input/reading-archives.md) to disk during scanning, and when reading when extracting or [zipping](../output/writing-archives.md) -Temporary files are ones that are deleted as soon as `igir` no longer needs them for processing. `igir` will also delete any leftover temporary files on exit. +Temporary files are ones that are deleted as soon as Igir no longer needs them for processing. Igir will also delete any leftover temporary files on exit. -`igir` will use your operating system's temporary directory for these files by default. The option `--temp-dir ` is provided to let you change the directory, and you may want to do this for a few reasons: +Igir will use your operating system's temporary directory for these files by default. 
The option `--temp-dir ` is provided to let you change the directory, and you may want to do this for a few reasons: - Your operating system drive has minimal space available - You want to protect your operating system drive from excess wear and tear diff --git a/docs/advanced/troubleshooting.md b/docs/advanced/troubleshooting.md index 9e0ae0fc5..b4a89667c 100644 --- a/docs/advanced/troubleshooting.md +++ b/docs/advanced/troubleshooting.md @@ -34,9 +34,9 @@ FATAL ERROR: Reached heap limit Allocation failed - JavaScript heap out of memor 11: 0x7fe14fed9ef6 ``` -The issue is that `igir` ran out of memory likely due to low system limits, large DAT packs, or large ROM collections. +The issue is that Igir ran out of memory likely due to low system limits, large DAT packs, or large ROM collections. -You likely need to process your ROM collection in batches, just be careful when using the [`igir clean` command](../commands.md). If you don't need every DAT from a pack, you can try reducing the number of DATs being processed with the [`--dat-*-regex ` and `--dat-*-regex-exclude ` options](../dats/processing.md#dat-filtering) like this: +You likely need to process your ROM collection in batches, just be careful when using the [`igir clean` command](../commands.md). If you don't need every DAT from a pack, you can try reducing the number of DATs being processed with the [`--dat-*-regex ` and `--dat-*-regex-exclude ` options](../dats/processing.md#dat-filtering) like this: ```shell igir [commands..] 
--dat "*.dat" --dat-name-regex "/nintendo/i" diff --git a/docs/alternatives.md b/docs/alternatives.md index 55b6e1044..747696fcd 100644 --- a/docs/alternatives.md +++ b/docs/alternatives.md @@ -2,44 +2,52 @@ There are a few different popular ROM managers that have similar features: -| Feature | [igir](index.md) | [clrmamepro](https://mamedev.emulab.it/clrmamepro/) | [RomVault](https://www.romvault.com/) | [RomCenter](http://www.romcenter.com/) | -|------------------------------------------|--------------------------------------------------------------------------------------------------|---------------------------------------------------------------|-------------------------------------------------------------|--------------------------------------------| -| Code: still in development | โœ… | โœ… | โœ… | โ“ | -| Code: open source | โœ… GPL | โŒ | โŒ | โŒ | -| App: OS compatibility | โœ… anything [Node.js supports](https://nodejs.org/en/download) | โš ๏ธ Windows, macOS & Linux via [Wine](https://www.winehq.org/) | โš ๏ธ Windows, Linux via [Mono](https://www.mono-project.com/) | โŒ Windows only | -| App: UI or CLI | CLI only by design | UI only | Separate UI & CLI versions | UI only | -| App: required setup steps | โœ… no setup required | โŒ requires "profile" setup per DAT | โš ๏ธ if specifying DAT & ROM dirs | โŒ requires per-DAT DB setup | -| DATs: supported formats | Logiqx XML, MAME ListXML, MAME Software List, CMPro, HTGD SMDB ([DATs docs](dats/processing.md)) | Logiqx XML, MAME ListXML, MAME Software List, CMPro | Logiqx XML, MAME ListXML, CMPro, RomCenter, HTGD SMDB | Logiqx XML, CMPro, RomCenter | -| DATs: process multiple at once | โœ… | โš ๏ธ via the batcher | โœ… | โŒ | -| DATs: infer parent/clone info | โœ… | โŒ | โŒ | โŒ | -| DATs: built-in download manager | โŒ | โŒ | โš ๏ธ via [DatVault](https://www.datvault.com/) | โŒ | -| DATs: supports DAT URLs | โœ… | โŒ | โŒ | โŒ | -| DATs: create from files (dir2dat) | โœ… [dir2dat 
docs](dats/dir2dat.md) | โœ… | โ“ | โŒ | -| DATs: fixdat creation | โœ… [Fixdat docs](dats/fixdats.md) | โœ… | โœ… | โŒ | -| DATs: combine multiple | โœ… | โŒ | โœ… | โŒ | -| Archives: extraction formats | โœ… many formats ([reading archives docs](input/reading-archives.md)) | โœ… `.zip`, `.7z`, `.rar` | โš ๏ธ `.zip`, `.7z` | โš ๏ธ `.zip`, `.7z` | -| Archives: creation formats | โŒ `.zip` only by design ([writing archives docs](output/writing-archives.md)) | โœ… `.zip`, `.7z`, `.rar` | โš ๏ธ `.zip` (TorrentZip), `.7z` | โš ๏ธ `.zip`, `.7z` | -| Archives: automatic extension correction | โœ… | โŒ | โŒ | โŒ | -| ROMs: checksum matching strategies | โœ… CRC32+size, MD5, SHA1, SHA256 | โš ๏ธ CRC32+size, MD5, SHA1 | โš ๏ธ CRC32+size, MD5, SHA1 | โ“ | -| ROMs: CHD scanning | โŒ | โš ๏ธ via chdman | โœ… v1-5 natively | โš ๏ธ v1-4 natively | -| ROMs: scan/checksum caching | โœ… | โŒ | โœ… | โœ… | -| ROMs: header parsing | โœ… | โœ… | โœ… | โš ๏ธ via plugins | -| ROMs: header removal | โœ… [automatic and forced](roms/headers.md) | โŒ | โŒ | โŒ | -| ROMs: automatic extension correction | โœ… [output writing docs](output/options.md#fixing-rom-extensions) | โŒ | โŒ | โŒ | -| ROMs: supported merge types | โœ… full non-merged, non-merged, split, merged | โœ… full non-merged, non-merged, split, merged | โš ๏ธ full non-merged, split, merged | โš ๏ธ full non-merged, split, merged | -| ROMs: patching support | โœ… [patching docs](roms/patching.md) | โŒ | โš ๏ธ SNES SuperDAT | โŒ | -| Filtering: region, language, type, etc. 
| โœ… [many options](roms/filtering-preferences.md#filters) | โŒ only 1G1R options | โŒ | โš ๏ธ only at DB setup | -| Filtering: 1G1R support | โœ… [many options](roms/filtering-preferences.md#preferences-for-1g1r) | โš ๏ธ region & language only | โŒ | โš ๏ธ only at DB setup | -| Reports: report-only mode | โœ… | โœ… | โœ… | โœ… | -| Reports: easily parseable | โœ… CSV | โš ๏ธ newline-separated "have" & "miss" lists | โš ๏ธ newline-separated "full" & "fix" reports | โš ๏ธ newline-separated "have" & "miss" lists | -| Output: file link support | โœ… hard & symbolic links | โŒ | โŒ | โŒ | -| Output: separate input & output dirs | โœ… | โŒ | โš ๏ธ yes but files are always moved | โŒ | -| Output: subdirectory customization | โœ… [many options](output/path-options.md) | โŒ | โš ๏ธ depends on DAT organization | โŒ | -| Output: create single archive for DAT | โœ… | โŒ | โœ… | โŒ | +| Feature | [igir](index.md) | [RomVault](https://www.romvault.com/) | [clrmamepro](https://mamedev.emulab.it/clrmamepro/) | [RomCenter](http://www.romcenter.com/) | +|------------------------------------------|--------------------------------------------------------------------------------------------------|-------------------------------------------------------------|---------------------------------------------------------------|--------------------------------------------| +| App: still in development | โœ… | โœ… | โœ… | โ“ | +| App: OS compatibility | โœ… anything [Node.js supports](https://nodejs.org/en/download) | โš ๏ธ Windows, Linux via [Mono](https://www.mono-project.com/) | โš ๏ธ Windows, macOS & Linux via [Wine](https://www.winehq.org/) | โŒ Windows only | +| App: UI or CLI | CLI only by design | Separate UI & CLI versions | UI only | UI only | +| App: required setup steps | โœ… no setup required | โš ๏ธ if specifying DAT & ROM dirs | โŒ requires "profile" setup per DAT | โŒ requires per-DAT DB setup | +| App: open source | โœ… GPL | โŒ | โŒ | โŒ | +| 
DATs: supported formats | Logiqx XML, MAME ListXML, MAME Software List, CMPro, HTGD SMDB ([DATs docs](dats/processing.md)) | Logiqx XML, MAME ListXML, CMPro, RomCenter, HTGD SMDB | Logiqx XML, MAME ListXML, MAME Software List, CMPro | Logiqx XML, CMPro, RomCenter | +| DATs: process multiple at once | โœ… | โœ… | โš ๏ธ via the batcher | โŒ | +| DATs: infer parent/clone info | โœ… | โŒ | โŒ | โŒ | +| DATs: built-in download manager | โŒ | โš ๏ธ via [DatVault](https://www.datvault.com/) | โŒ | โŒ | +| DATs: supports DAT URLs | โœ… | โŒ | โŒ | โŒ | +| DATs: create from files (dir2dat) | โœ… [dir2dat docs](dats/dir2dat.md) | โ“ | โœ… | โŒ | +| DATs: fixdat creation | โœ… [Fixdat docs](dats/fixdats.md) | โœ… | โœ… | โŒ | +| DATs: combine multiple | โœ… | โœ… | โŒ | โŒ | +| ROM Scanning: parallel scanning | โœ… | โŒ | โ“ | โ“ | +| ROM Scanning: scanning exclusions | โœ… | โŒ | โ“ | โ“ | +| ROM Scanning: quick scanning | โœ… [matching docs](roms/matching.md) | โœ… | โš ๏ธ by default | โ“ | +| ROM Scanning: scan/checksum caching | โœ… | โœ… | โŒ | โœ… | +| ROMs: checksum matching strategies | โœ… CRC32+size, MD5, SHA1, SHA256 | โš ๏ธ CRC32+size, MD5, SHA1 | โš ๏ธ CRC32+size, MD5, SHA1 | โ“ | +| ROMs: header detection | โœ… | โœ… | โš ๏ธ via supplemental XMLs | โš ๏ธ via plugins | +| ROMs: header removal | โœ… [automatic and forced](roms/headers.md) | โŒ | โŒ | โŒ | +| ROMs: automatic extension correction | โœ… [output writing docs](output/options.md#fixing-rom-extensions) | โŒ | โŒ | โŒ | +| ROMs: patching support | โœ… [patching docs](roms/patching.md) | โš ๏ธ SNES SuperDAT | โŒ | โŒ | +| Arcade: supported merge types | โœ… full non-merged, non-merged, split, merged ([arcade docs](usage/arcade.md)) | โš ๏ธ full non-merged, split, merged | โœ… full non-merged, non-merged, split, merged | โš ๏ธ full non-merged, split, merged | +| Arcade: CHD disk inclusion | โœ… by default, can be turned off ([arcade docs](usage/arcade.md)) | โœ… by 
default, can be turned off | โ“ | โ“ | +| Arcade: sample inclusion | โŒ | โŒ | โœ… | โ“ | +| Archives: extraction formats | โœ… many formats ([reading archives docs](input/reading-archives.md)) | โš ๏ธ `.zip`, `.7z` (natively) | โœ… `.zip`, `.7z` (via `7z`), `.rar` (via `rar`) | โš ๏ธ `.zip`, `.7z` | +| Archives: `.chd` support | โš ๏ธ via `chdman` (bundled) | โœ… v1-5 natively | โš ๏ธ via `chdman` | โš ๏ธ v1-4 natively | +| Archives: `.cso` & `.zso` support | โš ๏ธ via `maxcso` (bundled) | โŒ | โŒ | โŒ | +| Archives: `.nkit.iso` support | โš ๏ธ matching but no extraction [GameCube docs](usage/console/gamecube.md#nkit) | โŒ | โŒ | โŒ | +| Archives: creation formats | โŒ `.zip` only by design ([writing archives docs](output/writing-archives.md)) | โš ๏ธ `.zip` (TorrentZip), `.7z` (RV7Z) | โœ… `.zip`, `.7z`, `.rar` | โš ๏ธ `.zip`, `.7z` | +| Archives: contents checksums | โœ… when needed ([reading archives docs](input/reading-archives.md)) | โš ๏ธ requires "files only" mode | โš ๏ธ if DAT has forcepacking=unzip | โ“ | +| Archives: automatic extension correction | โœ… | โŒ | โŒ | โŒ | +| Filtering: region, language, type, etc. 
| โœ… [many options](roms/filtering-preferences.md#filters) | โŒ | โŒ only 1G1R options | โš ๏ธ only at DB setup | +| Filtering: 1G1R support | โœ… [many options](roms/filtering-preferences.md#preferences-for-1g1r) | โŒ | โš ๏ธ region & language only | โš ๏ธ only at DB setup | +| Reports: report-only mode | โœ… | โœ… | โœ… | โœ… | +| Reports: easily parseable | โœ… CSV | โš ๏ธ newline-separated "full" & "fix" reports | โš ๏ธ newline-separated "have" & "miss" lists | โš ๏ธ newline-separated "have" & "miss" lists | +| Output: file link support | โœ… hard & symbolic links | โŒ | โŒ | โŒ | +| Output: separate input & output dirs | โœ… | โš ๏ธ yes but files are always moved | โŒ | โŒ | +| Output: subdirectory customization | โœ… [many options](output/path-options.md) | โš ๏ธ depends on DAT organization | โŒ | โŒ | +| Output: create single archive for DAT | โœ… | โœ… | โŒ | โŒ | !!! note - Just like `igir`, other ROM managers that are in active development are likely to release new features often. The above table is not guaranteed to be perfectly up-to-date, it is just a best effort. + Just like Igir, other ROM managers that are in active development are likely to release new features often. The above table is not guaranteed to be perfectly up-to-date, it is just a best effort. Other alternative ROM managers can be found in a number of other wikis, such as: diff --git a/docs/cli.md b/docs/cli.md new file mode 100644 index 000000000..8ea595c26 --- /dev/null +++ b/docs/cli.md @@ -0,0 +1,40 @@ +# CLI Overview + +Igir uses a series of live-updating progress bars to indicate what it is currently working on and how much processing is left to do. + + + +See the [internal operations](advanced/internals.md#order-of-operations) page for more information on every processing that Igir might do. + +## Progress bar icons + +ASCII symbols are used to indicate what processing is happening. 
Here is a table of those symbols, in order: + +| Symbol (magenta) | Scanning operation | +|------------------------------------------------------------|-------------------------------------------------------------------------------------------| +| โ†ป (circle arrow) | Files (DATs, ROMs, patches, etc.) are being found/enumerated | +| โ†“ (down arrow) | [DATs](dats/introduction.md) are being [downloaded](dats/processing.md#scanning-for-dats) | +| ฮฃ (sigma) | [DATs](dats/introduction.md) are being parsed | +| # (hash) | ROMs are having checksums calculated for [matching](roms/matching.md) | +| ^ (hat) | ROMs are being checked for [headers](roms/headers.md) | + +| Symbol (cyan) | Per-DAT processing operation | +|--------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------| +| โˆฉ (intersection) | DATs are having parent/clone information [inferred](dats/processing.md#parentclone-inference) | +| โ†” (split arrow) | DATs are having [merge/split rules](usage/arcade.md#rom-set-merge-types) applied | +| ฮฃ (sigma) | ROMs are being [matched](roms/matching.md) to the DAT | +| โˆ† (delta) | DAT is being [filtered](roms/filtering-preferences.md#filters), ROM [1G1R rules](roms/filtering-preferences.md#preferences-for-1g1r) are being applied | +| . 
(period) | ROM matches are having their [extension corrected](output/options.md#fixing-rom-extensions) | +| โ‰Ÿ (question equal) | ROM matches are being checked for issues | +| โˆช (union) | ROM matches are being combined into one zip | + +| Symbol (yellow) | Per-DAT writing operation | +|--------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------| +| # (hash) | Archives are having checksums calculated to [test](commands.md#test) after [writing](commands.md#rom-writing) | +| โ‰Ÿ (question equal) | Output files are being checked before being [overwritten](output/options.md#overwriting-files), no writing has started yet | +| โœŽ (pencil) | Output files are or have been written | + +| Symbol | Deleting operation | +|-------------------------------------------------------|-------------------------------------------------------------------------------------------------------------| +| โ™ป (recycle) | Output directory [cleaned files](output/cleaning.md) are being recycled | +| โœ• (x) | Moved ROM matches are being deleted, output directory [cleaned files](output/cleaning.md) are being deleted | diff --git a/docs/commands.md b/docs/commands.md index bfb947ecf..64abf9be2 100644 --- a/docs/commands.md +++ b/docs/commands.md @@ -1,6 +1,6 @@ # Commands -`igir` takes actions based on commands you specify. Each command has a clear input and output, and `igir` will never take surprise actions you did not specify. Multiple commands can (and will likely) be specified at once. +Igir takes actions based on commands you specify. Each command has a clear input and output, and Igir will never take surprise actions you did not specify. Multiple commands can (and will likely) be specified at once. !!! tip @@ -8,31 +8,31 @@ ## ROM writing -`igir` has three writing commands.
Only one writing command can be specified at a time, and all require the `--output ` option. +Igir has three writing commands. Only one writing command can be specified at a time, and all require the `--output ` option. ### `copy` -Copy ROMs from an input directory to the output directory. +Copy files from an input directory to the output directory. Files in the input directories will be left alone, they will _not_ be modified or deleted. ### `move` -Move ROMs from an input directory to the output directory. The same directory can be specified for both input & output, resulting in ROMs being renamed as their names change in [DATs](dats/introduction.md). +Move files from an input directory to the output directory. The same directory can be specified for both input & output, resulting in ROMs being renamed as their names change in [DATs](dats/introduction.md). -ROMs will be deleted from their input directory after _all_ ROMs for _every_ [DAT](dats/introduction.md) have been written. +Files that match to multiple ROMs in [DATs](dats/introduction.md) will be copied as needed. ### `link` -Create a link in the output directory to a ROM in the input directory. +Create a link in the output directory to a file in the input directory. By default, hard links are created, similar to [ln(1)](https://linux.die.net/man/1/ln). Use the `--symlink` option to create symbolic links. -## ROM archiving +## ROM extracting & zipping -`igir` has two ROM archive commands. Archive commands require either the `copy` or `move` write command. Only one archive command can be specified at a time. +Igir has two ROM archive commands. Archive commands require either the `copy` or `move` write command. Only one archive command can be specified at a time. -If no archive command is specified, files will be left as-is. If they are already extracted, then they will stay extracted. If they are already archived (including non-`.zip` archives), then they will stay archived. 
+If no archive command is specified, files will be left as-is. If they are already extracted, then they will stay extracted. If they are already archived (including non-`.zip` archives), then they will stay archived in their original format. !!! note @@ -50,11 +50,15 @@ ROMs will be archived into a `.zip` file as they are being copied or moved. ROMs ROMs that are already in an archive will be re-archived. +!!! note + + You can use the [`--dat-combine` option](dats/processing.md#dat-combining) to cause every ROM in a DAT to be zipped together. + ## ROM verification ### `test` -After performing one of the ROM writing commands, verify that the file was written correctly. +After performing one of the ROM writing commands (above), verify that the file was written correctly. - `extract test` tests that each ROM file written has the correct size & checksum - `zip test` tests that the `.zip` file has all the correct archive entry sizes & checksums, and contains no excess entries diff --git a/docs/dats/dir2dat.md b/docs/dats/dir2dat.md index 1d5f2a132..de6e5cb0e 100644 --- a/docs/dats/dir2dat.md +++ b/docs/dats/dir2dat.md @@ -2,7 +2,7 @@ "dir2dat" refers to DATs that have been automatically created based on files in an input directory. [DATs](./introduction.md) generated this way are not typically useful as-is, they usually require some hand editing after creation. -`igir` has the ability to create these DATs with the `igir dir2dat` command. Example: +Igir can create these DATs with the `igir dir2dat` command. Example: ```shell igir dir2dat --input [--input ..] @@ -10,7 +10,7 @@ igir dir2dat --input [--input ..] 
## dir2dat rules -`igir` uses the following rules when creating dir2dat DAT files: +Igir uses the following rules when creating dir2dat DAT files: - **A DAT file will be created for every input path.** @@ -87,6 +87,6 @@ Once DATs have been generated from input files, they are processed the same as a ## Alternative tools -It is unlikely that any ROM tool, including `igir`, will ever meet every person's exact DAT creation needs. +It is unlikely that any ROM tool, including Igir, will ever meet every person's exact DAT creation needs. [SabreTools](https://github.com/SabreTools/SabreTools) is a great tool for DAT management that offers many complex options for DAT creation, filtering, merging, and splitting. diff --git a/docs/dats/introduction.md b/docs/dats/introduction.md index 6285a8d9e..bc1513c43 100644 --- a/docs/dats/introduction.md +++ b/docs/dats/introduction.md @@ -8,7 +8,7 @@ From the [RetroPie docs](https://retropie.org.uk/docs/Validating%2C-Rebuilding%2 DATs are catalog files of every known ROM that exists per game system, complete with enough information to identify each file. -These DAT files ("DATs") help `igir` distinguish known ROM files in input directories from other files. Because DATs typically contain the complete catalog for a console, `igir` also uses them to generate reports for you on what ROMs were found and which are missing. +These DAT files ("DATs") help Igir distinguish known ROM files in input directories from other files. Because DATs typically contain the complete catalog for a console, Igir also uses them to generate reports for you on what ROMs were found and which are missing. The location to your DAT files are specified with the [`--dat ` option](./processing.md#scanning-for-dats): @@ -22,7 +22,7 @@ you can even specify archives that can contain multiple DATs (such as No-Intro's igir [commands..] 
--dat "No-Intro*.zip" --input ``` -See the [DAT processing page](./processing.md) for information on how `igir` scans for and processes DATs. +See the [DAT processing page](./processing.md) for information on how Igir scans for and processes DATs. ## DAT release groups @@ -46,9 +46,9 @@ And some less popular release groups are: ## Parent/clone (P/C) DATs -DATs that include "parent" and "clone" information help `igir` understand what game releases are actually the same game (are "clones" of each other). Frequently a game will be released in many regions or with different revisions, usually with only language translations and minor bug fixes. For example, No-Intro has 6+ "clones" of Pokรฉmon Blue cataloged. +DATs that include "parent" and "clone" information help Igir understand what game releases are actually the same game (are "clones" of each other). Frequently, a game will be released in many regions or with different revisions, usually with only language translations and minor bug fixes. For example, No-Intro has 6+ "clones" of Pokรฉmon Blue cataloged. -Being able to know that many releases are actually the same game gives `igir` the ability to produce "one game, one ROM" (1G1R) sets with the [`--single` option](../roms/filtering-preferences.md#preferences-for-1g1r). 1G1R sets include only one of these "clone" releases, usually filtered to a language and region, because many people don't care about ROMs they can't understand. +Being able to know that many releases are actually the same game gives Igir the ability to produce "one game, one ROM" (1G1R) sets with the [`--single` option](../roms/filtering-preferences.md#preferences-for-1g1r). 1G1R sets include only one of these "clone" releases, usually filtered to a language and region, because many people don't care about ROMs they can't understand. !!! 
note @@ -73,4 +73,4 @@ See the [arcade usage page](../usage/arcade.md) for more information on building ## Next steps -See the [DAT processing page](./processing.md) for information on how `igir` scans for and processes DATs. +See the [DAT processing page](./processing.md) for information on how Igir scans for and processes DATs. diff --git a/docs/dats/processing.md b/docs/dats/processing.md index 8333594d6..f1894427e 100644 --- a/docs/dats/processing.md +++ b/docs/dats/processing.md @@ -1,14 +1,14 @@ # DAT Processing -`igir` has a number of ways it can process [DATs](./introduction.md), and it processes them in the following order. +Igir has a number of ways it can process [DATs](./introduction.md), and it processes them in the following order. ## Just tell me what to do -[DATs](./introduction.md) can get fairly complicated, and there are many release groups each with their own focus areas and naming patterns. If all you want to do is organize your ROMs with `igir` in some sane way, follow these instructions: +[DATs](./introduction.md) can get fairly complicated, and there are many release groups, each with their own focus areas and naming patterns. If all you want to do is organize your ROMs with Igir in some consistent way, follow these instructions: 1. Go to the No-Intro DAT-o-MATIC [daily download page](https://datomatic.no-intro.org/index.php?page=download&s=64&op=daily) 2. Select the "P/C XML" radio option (as opposed to "standard DAT") and download the `.zip` to wherever you store your ROMs -3. Every time you run `igir`, specify the `.zip` file you downloaded with the `--dat ` option: +3. Every time you run Igir, specify the `.zip` file you downloaded with the `--dat ` option: ```shell igir [commands..] --dat "No-Intro*.zip" --input @@ -18,7 +18,7 @@ The `--dat ` option supports files, archives, directories, and globs like any of the other file options. See the [file scanning page](../input/file-scanning.md) for more information. 
-`igir` also supports URLs to DAT files and archives. This is helpful to make sure you're always using the most up-to-date version of a DAT hosted on sites such as GitHub. For example: +Igir also supports URLs to DAT files and archives. This is helpful to make sure you're always using the most up-to-date version of a DAT hosted on sites such as GitHub. For example: ```shell igir [commands..] --dat "https://raw.githubusercontent.com/libretro/libretro-database/master/dat/DOOM.dat" --input @@ -30,14 +30,14 @@ igir [commands..] --dat "https://raw.githubusercontent.com/libretro/libretro-dat ### Supported DAT formats -There have been a few DAT-like formats developed over the years. `igir` supports the following: +There have been a few DAT-like formats developed over the years. Igir supports the following: - [Logiqx XML](https://github.com/SabreTools/SabreTools/wiki/DatFile-Formats#logiqx-xml-format) (most common) (No-Intro, Redump, TOSEC, and more) - [MAME ListXML](https://easyemu.mameworld.info/mameguide/command_line/frontend_commands/listxml.html) (XML exported by the `mame -listxml` command) !!! tip - Instead of exporting the ListXML to a file yourself, you can also specify a MAME executable for the DAT path and then `igir` is smart enough to parse it: + Instead of exporting the ListXML to a file yourself, you can also specify a MAME executable for the DAT path and then Igir is smart enough to parse it: === ":simple-windowsxp: Windows" @@ -69,11 +69,11 @@ There have been a few DAT-like formats developed over the years. `igir` supports !!! tip - In case you come across a DAT in a format that `igir` doesn't support, SabreTools supports reading [a number of obscure formats](https://github.com/SabreTools/SabreTools/wiki/DatFile-Formats) and converting them to more standard formats such as Logiqx XML. 
+ In case you come across a DAT in a format that Igir doesn't support, SabreTools supports reading [a number of obscure formats](https://github.com/SabreTools/SabreTools/wiki/DatFile-Formats) and converting them to more standard formats such as Logiqx XML. ## DAT filtering -To be able to process only the DATs you want in downloaded archives, `igir` has a few filtering options. +To be able to process only the DATs you want in downloaded archives, Igir has a few filtering options. ### DAT name regex filtering @@ -92,12 +92,12 @@ Headerless|Encrypted !!! tip - `--dat-name-regex-exclude` is particularly helpful for excluding some No-Intro DATs versions such as "encrypted" and "headerless". + `--dat-name-regex-exclude ` is particularly helpful for excluding some No-Intro DATs versions such as "encrypted" and "headerless". ### DAT description regex filtering ```text ---dat-description-regex, --dat-description-regex-exclude +--dat-description-regex , --dat-description-regex-exclude ``` These options limit which DATs are processed. The regex is applied to the DAT's description found within its file contents. @@ -108,11 +108,17 @@ The `--dat-combine` option lets you combine every game from every parsed DAT int This may be desirable when creating a [dir2dat](./dir2dat.md), a [fixdat](fixdats.md), or other complicated situations. +!!! note + + Using this option with the [`igir zip` command](../output/writing-archives.md) will result in all ROMs in a DAT being archived into one file. This can work great for archiving older, cartridge-based consoles with smaller ROM sizes, but will likely not work well with larger ROMs. + + To keep files organized in a human-readable way, it is _not_ recommended to use the [`--dir-game-subdir never`](../output/path-options.md#append-the-game-name) option along with `igir zip --dat-combine`. 
+ ## Parent/clone inference -One feature that sets `igir` apart from other ROM managers is its ability to infer parent/clone information when DATs don't provide it. For example, Redump DATs don't provide parent/clone information, which makes it much more difficult to create 1G1R sets. +One feature that sets Igir apart from other ROM managers is its ability to infer parent/clone information when DATs don't provide it. For example, Redump DATs don't provide parent/clone information, which makes it much more difficult to create 1G1R sets. -For example, all of these Super Smash Bros. Melee releases should be considered the same game, even if a DAT doesn't provide proper information. If the releases are all considered the same game, then the `--single` option can be used in combination with [ROM preferences](../roms/filtering-preferences.md) to make a 1G1R set. `igir` is smart enough to understand that the only differences between these releases are the regions, languages, and revisions. +For example, all of these Super Smash Bros. Melee releases should be considered the same game, even if a DAT doesn't provide proper information. If the releases are all considered the same game, then the `--single` option can be used in combination with [ROM preferences](../roms/filtering-preferences.md) to make a 1G1R set. Igir is smart enough to understand that the only differences between these releases are the regions, languages, and revisions. ```text Super Smash Bros. Melee (Europe) (En,Fr,De,Es,It) @@ -124,14 +130,14 @@ Super Smash Bros. Melee (USA) (En,Ja) (Rev 2) !!! note - If a DAT has any parent/clone information then `igir` will use that and skip inference. If you want to ignore this information, you can provide the `--dat-ignore-parent-clone` option. + If a DAT has any parent/clone information then Igir will use that and skip inference. If you want to ignore this information, you can provide the `--dat-ignore-parent-clone` option. !!! 
note - It is unlikely that `igir` will ever be perfect with inferring parent/clone information. If you find an instance where `igir` made the wrong choice, please create a [GitHub issue](https://github.com/emmercm/igir/issues). + It is unlikely that Igir will ever be perfect with inferring parent/clone information. If you find an instance where Igir made the wrong choice, please create a [GitHub issue](https://github.com/emmercm/igir/issues). !!! tip [Retool](https://github.com/unexpectedpanda/retool) (no longer maintained) is a DAT manipulation tool that has a set of hand-maintained [parent/clone lists](https://github.com/unexpectedpanda/retool-clonelists-metadata) to supplement common DAT groups such as No-Intro and Redump. This helps cover situations such as release titles in different languages that would be hard to group together automatically. - 1G1R DATs made by Retool can be used seamlessly with `igir`. You won't need to supply the `--single` option or any [ROM preferences](../roms/filtering-preferences.md) for `igir`, as you would have already applied these preferences in Retool, but you can still supply [ROM filtering](../roms/filtering-preferences.md) options if desired. + 1G1R DATs made by Retool can be used seamlessly with Igir. You won't need to supply the `--single` option or any [ROM preferences](../roms/filtering-preferences.md) for Igir, as you would have already applied these preferences in Retool, but you can still supply [ROM filtering](../roms/filtering-preferences.md) options if desired. 
diff --git a/docs/input/file-scanning.md b/docs/input/file-scanning.md index 747be77d4..07fcbd3ef 100644 --- a/docs/input/file-scanning.md +++ b/docs/input/file-scanning.md @@ -1,6 +1,6 @@ # File Scanning -`igir` has a few options to specify input files, as well as files to exclude: +Igir has a few options to specify input files, as well as files to exclude: - ROMs: `--input ` (required), `--input-exclude ` - [DATs](../dats/processing.md): `--dat `, `--dat-exclude ` @@ -8,7 +8,7 @@ ## Archive files -`igir` can scan archives for DATs, ROMs, and patches. See the [archives](reading-archives.md) page for more information on supported formats. +Igir can scan archives for DATs, ROMs, and patches. See the [archives](reading-archives.md) page for more information on supported formats. ## Glob patterns diff --git a/docs/input/reading-archives.md b/docs/input/reading-archives.md index 2be505334..fb4379612 100644 --- a/docs/input/reading-archives.md +++ b/docs/input/reading-archives.md @@ -1,38 +1,41 @@ # Reading Archives -`igir` supports scanning the contents of archives for ROMs, DATs, and ROM patches. +Igir supports scanning the contents of archives for ROMs, DATs, and ROM patches. 
## Supported types for reading -`igir` supports most common archive formats: - -| Extension | Contains file CRC32s | `igir` can extract without a third-party binary | `igir` can checksum without temporary files | -|--------------------------|----------------------|-------------------------------------------------|---------------------------------------------| -| `.7z` | โœ… | โŒ | โŒ | -| `.gz`, `.gzip` | โŒ CRC16 | โŒ | โŒ | -| `.rar` | โœ… | โœ… | โŒ | -| `.tar` | โŒ | โœ… | โœ… โ‰ค64MiB | -| `.tar.gz`, `.tgz` | โŒ | โœ… | โœ… โ‰ค64MiB | -| `.z01` | โœ… | โŒ | โŒ | -| `.zip` (including zip64) | โœ… | โœ… | โœ… โ‰ค64MiB | -| `.zip.001` | โœ… | โŒ | โŒ | -| `.zipx` | โœ… | โŒ | โŒ | +Igir supports most common archive formats: + +| Extension | Contains file CRC32s | Igir can extract without a third-party binary | Igir can checksum without temporary files | +|------------------------------------------------------------------|----------------------|-----------------------------------------------|-------------------------------------------| +| `.7z` | โœ… | โŒ `7za` | โŒ | +| `.chd` | โŒ SHA1 | โŒ `chdman` | โŒ | +| `.cso`, `.zso`, `.dax` | โŒ | โŒ `maxcso` | โš ๏ธ CRC32 only | +| `.gz`, `.gzip` | โŒ CRC16 | โŒ `7za` | โŒ | +| `.nkit.iso` ([GameCube docs](../usage/console/gamecube.md#nkit)) | โœ… | โŒ no extraction support | โœ… | +| `.rar` | โœ… | โœ… | โŒ | +| `.tar` | โŒ | โœ… | โœ… โ‰ค64MiB | +| `.tar.gz`, `.tgz` | โŒ | โœ… | โœ… โ‰ค64MiB | +| `.z01` | โœ… | โŒ `7za` | โŒ | +| `.zip` (including zip64) | โœ… | โœ… | โœ… โ‰ค64MiB | +| `.zip.001` | โœ… | โŒ `7za` | โŒ | +| `.zipx` | โœ… | โŒ `7za` | โŒ | **You should prefer archive formats that have CRC32 checksum information for each file.** -By default, `igir` uses CRC32 information to [match ROMs](../roms/matching.md) to DAT entries. If an archive already contains CRC32 information for each file, then `igir` doesn't need to extract each file and compute its CRC32. 
This can save a lot of time on large archives. +By default, Igir uses CRC32 information to [match ROMs](../roms/matching.md) to DAT entries. If an archive already contains CRC32 information for each file, then Igir doesn't need to extract each file and compute its CRC32. This can save a lot of time on large archives. -This is why you should use the [`igir zip` command](../output/writing-archives.md) when organizing your primary ROM collection. It is much faster for `igir` to scan archives with CRC32 information, speeding up actions such as merging new ROMs into an existing collection. +This is why you should use the [`igir zip` command](../output/writing-archives.md) when organizing your primary ROM collection. It is much faster for Igir to scan archives with CRC32 information, speeding up actions such as merging new ROMs into an existing collection. -**You should prefer archive formats that `igir` can extract natively.** +**You should prefer archive formats that Igir can extract natively.** -Somewhat proprietary archive formats such as `.7z` and `.rar` require `igir` to use an external tool to enumerate and extract files. This can greatly slow down processing speed. +Somewhat proprietary archive formats such as `.7z` and `.rar` require Igir to use an external tool to enumerate and extract files. This can greatly slow down processing speed. -This is why `igir` uses `.zip` as its output archive of choice, `.zip` files are easy and fast to read, even if they can't offer as high of compression as other formats. +This is why Igir uses `.zip` as its output archive of choice, `.zip` files are easy and fast to read, even if they can't offer as high of compression as other formats. ## Exact archive matching -Some DAT files such as the [libretro BIOS System.dat](https://github.com/libretro/libretro-database/blob/master/dat/System.dat) catalog archives such as zip files, rather than the contents of those archives. 
By default, `igir` will try to detect DATs like these and calculate checksums for all archive files, in addition to the files they contain. +Some DAT files such as the [libretro BIOS System.dat](https://github.com/libretro/libretro-database/blob/master/dat/System.dat) catalog archives such as zip files, rather than the contents of those archives. By default, Igir will try to detect DATs like these and calculate checksums for all archive files, in addition to the files they contain. This adds a potentially non-trivial amount of processing time during ROM scanning, so this behavior can be turned off with the option: @@ -40,7 +43,7 @@ This adds a potentially non-trivial amount of processing time during ROM scannin --input-checksum-archives never ``` -If for some reason `igir` isn't identifying an input file correctly as an archive, this additional processing can be forced with the option: +If for some reason Igir isn't identifying an input file correctly as an archive, this additional processing can be forced with the option: ```text --input-checksum-archives always @@ -48,6 +51,6 @@ If for some reason `igir` isn't identifying an input file correctly as an archiv ## Checksum cache -It can be expensive to calculate checksums of files within archives, especially MD5, SHA1, and SHA256. If `igir` needs to calculate a checksum that is not easily read from the archive (see above), it will cache the result in a file named `igir.cache`. This cached result will then be used as long as the input file's size and modified timestamp remain the same. +It can be expensive to calculate checksums of files within archives, especially MD5, SHA1, and SHA256. If Igir needs to calculate a checksum not easily read from the archive (see above), it will cache the result in a file named `igir.cache`. This cached result will then be used as long as the input file's size and modified timestamp remain the same. 
-The location of this cache file can be controlled with the `--cache-path ` option, or caching can be disabled entirely with the `--disable-cache` option. You can safely delete `igir.cache` when `igir` isn't running if the file becomes too large for you. +The location of this cache file can be controlled with the `--cache-path ` option, or caching can be disabled entirely with the `--disable-cache` option. You can safely delete `igir.cache` when Igir isn't running if the file becomes too large for you. diff --git a/docs/installation.md b/docs/installation.md index 979a3a217..cc6475a71 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -1,15 +1,15 @@ # Installation -`igir` is supported on :simple-windowsxp: Windows, :simple-apple: macOS, :simple-linux: Linux, and every other operating system that [Node.js](https://nodejs.org) supports. +Igir is supported on :simple-windowsxp: Windows, :simple-apple: macOS, :simple-linux: Linux, and every other operating system that [Node.js](https://nodejs.org) supports. -There are a few different installation options offered for `igir` with varying levels of technical complexity. Every option will require some baseline understanding of command-line interfaces (CLIs). +There are a few different installation options offered for Igir with varying levels of technical complexity. Every option will require some baseline understanding of command-line interfaces (CLIs). 
## Via Node.js [![npm: version](https://img.shields.io/npm/v/igir?color=%23cc3534&label=version&logo=npm&logoColor=white)](https://www.npmjs.com/package/igir) [![Node.js](https://img.shields.io/node/v/igir?label=Node.js&logo=node.js&logoColor=white)](https://nodejs.org/en/download/) -The best way to ensure that you are always running the most up-to-date version of `igir` is to run it via [`npx`](https://docs.npmjs.com/cli/v9/commands/npx) which comes installed with [Node.js](https://nodejs.org/en/download/): +The best way to ensure that you are always running the most up-to-date version of Igir is to run it via [`npx`](https://docs.npmjs.com/cli/v9/commands/npx) which comes installed with [Node.js](https://nodejs.org/en/download/): ```shell npx igir@latest [commands..] [options] @@ -21,7 +21,7 @@ for example: npx igir@latest copy extract --dat *.dat --input ROMs/ --output ROMs-Sorted/ --dir-dat-name ``` -[![asciicast](https://asciinema.org/a/hjMOlN3DwSgo9NGHzPtncOoq9.svg)](https://asciinema.org/a/hjMOlN3DwSgo9NGHzPtncOoq9) + !!! tip @@ -41,7 +41,7 @@ npx igir@latest copy extract --dat *.dat --input ROMs/ --output ROMs-Sorted/ --d !!! note - If you want to help beta test `igir`, you can run the most bleeding-edge version with the command: + If you want to help beta test Igir, you can run the most bleeding-edge version with the command: ```shell npm exec --yes -- "github:emmercm/igir#main" [commands..] [options] @@ -49,14 +49,14 @@ npx igir@latest copy extract --dat *.dat --input ROMs/ --output ROMs-Sorted/ --d ## Via Homebrew (macOS) -[Homebrew](https://brew.sh/) is third-party package manager for macOS. You can install `igir` with these simple commands: +[Homebrew](https://brew.sh/) is a third-party package manager for macOS. 
You can install Igir with these simple commands: ```shell brew tap emmercm/igir brew install igir ``` -You can then update `igir` with _either_ of these commands +You can then update Igir with _either_ of these commands ```shell # Update every Homebrew package @@ -71,43 +71,3 @@ brew upgrade igir [![GitHub: release](https://img.shields.io/github/v/release/emmercm/igir?color=%236e5494&logo=github&logoColor=white)](https://github.com/emmercm/igir/releases/latest) If you don't want to download Node.js, you can download executables for various OSes from the [GitHub releases](https://github.com/emmercm/igir/releases) page. - -## Via Docker - -If none of the above options work for you, [Docker](https://www.docker.com/) may be an option. You will need to mount your input and output directories as volumes, which will significantly reduce your file read and write speeds. - -=== ":simple-windowsxp: Windows" - - ```batch - docker run --interactive --tty --rm ^ - --volume "%cd%:\pwd" ^ - --workdir "/pwd" ^ - node:lts ^ - npx igir@latest copy zip --dat "*.dat" --input ROMs\ --output ROMs-Sorted\ --dir-dat-name - ``` - -=== ":simple-apple: macOS" - - ```shell - docker run --interactive --tty --rm \ - --volume "$PWD:/pwd" \ - --workdir "/pwd" \ - node:lts \ - npx igir@latest copy zip --dat "*.dat" --input ROMs/ --output ROMs-Sorted/ --dir-dat-name - ``` - -=== ":simple-linux: Linux" - - ```shell - docker run --interactive --tty --rm \ - --volume "$PWD:/pwd" \ - --workdir "/pwd" \ - node:lts \ - npx igir@latest copy zip --dat "*.dat" --input ROMs/ --output ROMs-Sorted/ --dir-dat-name - ``` - -!!! warning - - Make sure to quote all of your [file globs](input/file-scanning.md)! 
- -[![asciicast](https://asciinema.org/a/5OAVbSXXoosTr0WyBvjQGBqRp.svg)](https://asciinema.org/a/5OAVbSXXoosTr0WyBvjQGBqRp) diff --git a/docs/overview.md b/docs/introduction.md similarity index 68% rename from docs/overview.md rename to docs/introduction.md index 227f6f1b5..0b0cd41aa 100644 --- a/docs/overview.md +++ b/docs/introduction.md @@ -1,4 +1,4 @@ -# Overview +# Introduction ## What is a ROM? @@ -8,7 +8,7 @@ From [Wikipedia](https://en.wikipedia.org/wiki/ROM_image): ROMs are complete copies of game data stored in cartridges or on discs. -A game may consist of multiple ROMs. For example, arcade cabinets that contain multiple chips, or disc-based games that have multiple tracks on the disc. +A game may consist of multiple ROMs. For example, arcade cabinets, which contain multiple chips, or disc-based games that have multiple tracks on the disc. ## What is a ROM manager? @@ -19,22 +19,22 @@ ROM managers are applications that serve two main purposes: all additional features help serve these two purposes. -Most ROM managers can automatically read & write many different ROM types including those in [archives](input/reading-archives.md) and those with [headers](roms/headers.md) so that you don't have to do much pre-work. +Most ROM managers can automatically read & write many different ROM types, including those in [archives](input/reading-archives.md) and those with [headers](roms/headers.md) so that you don't have to do much pre-work. -Most ROM managers rely on [DATs](dats/introduction.md), files that catalog every known ROM that exists per game system. DATs are published by release groups dedicated to keeping these catalogs accurate and up-to-date. DATs help ROM collectors name their ROMs in a consistent way as well as understand what ROMs may be missing from their collection. +Most ROM managers rely on [DATs](dats/introduction.md), files that catalog every known ROM that exists per game system. 
DATs are published by release groups dedicated to keeping these catalogs accurate and up to date. DATs help ROM collectors name their ROMs consistently as well as understand what ROMs may be missing from their collection. -## What is `igir`? +## What is Igir? -`igir` is a ROM manager for the modern age. +Igir is a ROM manager for the modern age. -Most ROM managers are only built for Windows, and some offer workarounds for running on macOS and Linux. Most of these managers have confusing GUIs that make batch-able, repeatable actions difficult. `igir` is a command line tool that works on any OS. +Most ROM managers are only built for Windows, and some offer workarounds for running on macOS and Linux. Most of these managers have confusing GUIs that make batch-able, repeatable actions difficult. Igir is a command line tool that works on any OS. -In addition, `igir` has features that aren't found in any other ROM managers, such as [ROM patching](roms/patching.md). +In addition, Igir has features that aren't found in any other ROM managers, such as [ROM patching](roms/patching.md). !!! info - See the [alternative managers](alternatives.md) page for a feature comparison between `igir` and other ROM managers. + See the [alternative managers](alternatives.md) page for a feature comparison between Igir and other ROM managers. ## Next steps -See the [installation](installation.md) page for instructions on getting `igir` installed. +See the [installation](installation.md) page for instructions on getting Igir installed. diff --git a/docs/output/cleaning.md b/docs/output/cleaning.md index 5f35aef94..7b8dafc3b 100644 --- a/docs/output/cleaning.md +++ b/docs/output/cleaning.md @@ -15,9 +15,9 @@ In practical terms, this means: **2. 
If [tokens](tokens.md) are used with the `--output ` option, only subdirectories that are written to will be considered for cleaning.** -For example, if the output directory is specified as `--output "games/{mister}"`, and only Game Boy Color games are found in `--input `, then only the `games/Gameboy/` directory would be considered for cleaning. Other directories that may already exist such as `games/GBA/` and `games/NES/` would _not_ be considered for cleaning, as `igir` did not write there. +For example, if the output directory is specified as `--output "games/{mister}"`, and only Game Boy Color games are found in `--input `, then only the `games/Gameboy/` directory would be considered for cleaning. Other directories that may already exist such as `games/GBA/` and `games/NES/` would _not_ be considered for cleaning, as Igir did not write there. -In other words, `games/{mister}` is _not_ equivalent to `games/*`. `igir` will _not_ indiscriminately delete files in `games/`. +In other words, `games/{mister}` is _not_ equivalent to `games/*`. Igir will _not_ indiscriminately delete files in `games/`. If you want to clean _every_ directory in `games/`, you could specify it as both the `--input ` and `--output `: @@ -47,7 +47,7 @@ See the [Analogue Pocket](../usage/hardware/analogue-pocket.md) page for a pract ## Backing up cleaned files -By default, `igir` will recycle cleaned files, and if recycle fails then it will delete them. This is potentially destructive, so a `--clean-backup ` option is provided to instead move files to a backup directory. +By default, Igir will recycle cleaned files, and if recycle fails, then it will delete them. This is potentially destructive, so a `--clean-backup ` option is provided to instead move files to a backup directory. The input directory structure is not maintained, no subdirectories will be created in the backup directory. Files of conflicting names will have a number appended to their name, e.g. `File (1).rom`. 
diff --git a/docs/output/options.md b/docs/output/options.md index 01ff9c926..ef24a1a3a 100644 --- a/docs/output/options.md +++ b/docs/output/options.md @@ -2,7 +2,7 @@ ## Overwriting files -By default, `igir` will _not_ overwrite or delete any files already in the output directory. +By default, Igir will _not_ overwrite or delete any files already in the output directory. To change this behavior, the `--overwrite` option will force overwriting files in the output directory as necessary. Be careful with this option as it can cause unnecessary wear and tear on your hard drives. @@ -10,7 +10,7 @@ The `--overwrite-invalid` option can also overwrite files in the output director ## Fixing ROM extensions -ROM dumpers don't always do a good job of using the generally accepted filename extension when writing files. In situations where DATs aren't provided, or information in DATs is incomplete, `igir` has some ability to find the correct extension that filenames should have. This is done using [file signatures](https://en.wikipedia.org/wiki/List_of_file_signatures), pieces of data that are common to every file of a certain format. +ROM dumpers don't always do a good job of using the generally accepted filename extension when writing files. In situations where DATs aren't provided, or information in DATs is incomplete, Igir has some ability to find the correct extension that filenames should have. This is done using [file signatures](https://en.wikipedia.org/wiki/List_of_file_signatures), pieces of data that are common to every file of a certain format. Here are some examples of common mistakes: diff --git a/docs/output/path-options.md b/docs/output/path-options.md index 562243258..d4947d84b 100644 --- a/docs/output/path-options.md +++ b/docs/output/path-options.md @@ -1,6 +1,6 @@ # Output Path Options -`igir` offer many options to control how ROMs are sorted in the specified output directory. 
+Igir offers many options to control how ROMs are sorted in the specified output directory. All `--dir-*` options append subdirectories to whatever is specified in the `--output ` option. Many `--dir-*` options have an [output path token](./tokens.md) equivalent, which also controls how ROMs are sorted. @@ -485,3 +485,67 @@ You can also combine this option with `--dir-letter-count ` for ranges wi ```text --dir-game-subdir ``` + +By default, games with multiple ROMs are grouped together into their own output subdirectory. This is because emulators typically expect these files to be next to each other, but also because different games may have duplicate filenames (e.g. Sega Dreamcast GDIs all have a `track01.bin`). + +```text +ROMS-Output/ +└── TOSEC + ├── Sega Dreamcast - Games - US + │ ├── Sonic Adventure 2 v1.008 (2001)(Sega)(US)(M5)[!][3S] + │ │ ├── Sonic Adventure 2 v1.008 (2001)(Sega)(US)(M5)[!][3S].gdi + │ │ ├── track01.bin + │ │ ├── track02.raw + │ │ └── track03.bin + │ └── Sonic Adventure v1.005 (1999)(Sega)(US)(M5)[!][26S] + │ ├── Sonic Adventure v1.005 (1999)(Sega)(US)(M5)[!][26S].gdi + │ ├── track01.bin + │ ├── track02.raw + │ └── track03.bin + └── Sega Mega-CD & Sega CD - CD - Games - [ISO] + └── Sonic CD (1993)(Sega)(NTSC)(US)[!][SEGA4407RE152 R7D] + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 01 of 35)[!][SEGA4407RE152 R7D].iso + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 02 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 03 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 04 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 05 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 06 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 07 of 35)[!][SEGA4407RE152 R7D].wav + 
├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 08 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 09 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 10 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 11 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 12 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 13 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 14 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 15 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 16 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 17 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 18 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 19 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 20 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 21 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 22 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 23 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 24 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 25 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 26 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 27 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 28 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 29 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 30 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD 
(1993)(Sega)(NTSC)(US)(Track 31 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 32 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 33 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 34 of 35)[!][SEGA4407RE152 R7D].wav + ├── Sonic CD (1993)(Sega)(NTSC)(US)(Track 35 of 35)[!][SEGA4407RE152 R7D].wav + └── Sonic CD (1993)(Sega)(NTSC)(US)[!][SEGA4407RE152 R7D].cue +``` + +You can change this behavior with the `--dir-game-subdir ` option: + +| Mode | Outcome | +|------------------------------------|------------------------------------------------------------------------------------------------------------------| +| `--dir-game-subdir never` | Games with multiple ROMs are never grouped into their own subdirectory, which may cause conflicting output files | +| `--dir-game-subdir auto` (default) | Games with multiple ROMs are grouped into their own subdirectory, games with a single ROM are not | +| `--dir-game-subdir always` | Every game is grouped into its own subdirectory, no matter the number of ROMs it has | diff --git a/docs/output/reporting.md b/docs/output/reporting.md index b2059d434..dbc5d68f5 100644 --- a/docs/output/reporting.md +++ b/docs/output/reporting.md @@ -11,9 +11,9 @@ When using DATs (the [`--dat ` option](../dats/processing.md#scanning-for- - `UNUSED`: what input files didn't match to any ROM - `DELETED`: what output files were [cleaned](cleaning.md) (`igir clean` command) -At least one DAT is required for the `igir report` command to work, otherwise `igir` has no way to understand what input files are known ROMs and which aren't. See the [DAT docs](../dats/introduction.md) for more information about DATs. +At least one DAT is required for the `igir report` command to work, otherwise Igir has no way to understand what input files are known ROMs and which aren't. See the [DAT docs](../dats/introduction.md) for more information about DATs. 
-The `igir report` command can be specified on its own without any [writing command](../commands.md) (i.e. `igir copy`, `igir move`, etc.) in order to report on an existing collection. This causes `igir` to operate in a _read-only_ mode, no files will be copied, moved, or deleted. For example: +The `igir report` command can be specified on its own without any [writing command](../commands.md) (i.e. `igir copy`, `igir move`, etc.) to report on an existing collection. This causes Igir to operate in a _read-only_ mode, no files will be copied, moved, or deleted. For example: === ":simple-windowsxp: Windows" @@ -48,7 +48,7 @@ See the `igir --help` message for the report's default location. The output report format is a standard CSV that can be opened in Microsoft Excel, Apple Numbers, Google Sheets, LibreOffice Calc, and other similar spreadsheet applications. -Unlike the report formats of [other ROM managers](../alternatives.md), CSVs allow you to filter rows by column values. For example, you can filter the "Status" column to only "MISSING" to understand what ROMs are missing from your collection, or to "UNUSED" to understand what input files weren't used as the source of any output file. The ability to filter CSVs in spreadsheet applications means that `igir` should not need use-case-specific report options to achieve your goal. +Unlike the report formats of [other ROM managers](../alternatives.md), CSVs allow you to filter rows by column values. For example, you can filter the "Status" column to only "MISSING" to understand what ROMs are missing from your collection, or to "UNUSED" to understand what input files weren't used as the source of any output file. The ability to filter CSVs in spreadsheet applications means that Igir shouldn't need use-case-specific report options to achieve your goal. To perform this filtering, most spreadsheet applications have a button or menu item to "create a filter" or "auto filter." 
diff --git a/docs/output/tokens.md b/docs/output/tokens.md index 1aed1dfb5..b178d1786 100644 --- a/docs/output/tokens.md +++ b/docs/output/tokens.md @@ -1,6 +1,6 @@ # Output Path Tokens -When specifying a ROM [writing command](../commands.md) you have to specify an `--output ` directory. `igir` has a few replaceable "tokens" that can be referenced in the `--output ` directory value. This can aid in sorting ROMs into a more complicated directory structure. +When specifying a ROM [writing command](../commands.md) you have to specify an `--output ` directory. Igir has a few replaceable "tokens" that can be referenced in the `--output ` directory value. This can aid in sorting ROMs into a more complicated directory structure. See [output path tokens](./path-options.md) for other options that will further sort your ROMs into subdirectories. @@ -60,14 +60,10 @@ When using [DATs](../dats/introduction.md), you can make use of console & game i - `{datName}` the matching DAT's name, similar to how the [`--dir-dat-name` option](./path-options.md) works - `{datDescription}` the matching DAT's description, similar to how the [`--dir-dat-description` option](./path-options.md) works -- `{region}` each of the ROM's region(s) (e.g. `USA`, `EUR`, `JPN`, `WORLD`) -- `{language}` each of the ROM's language(s) (e.g. `EN`, `ES`, `JA`) - -## Game information - -You can use some information about each game: - -- `{gameType}` the game's "type," one of: `Aftermarket`, `Alpha`, `Bad`, `Beta`, `BIOS`, `Demo`, `Device`, `Fixed`, `Hacked`, `Homebrew`, `Overdump`, `Pending Dump`, `Pirated`, `Prototype`, `Retail` (most games will be this), `Sample`, `Test`, `Trained`, `Translated`, `Unlicensed` +- `{region}` each of the game's region(s) (e.g. `USA`, `EUR`, `JPN`, `WORLD`) +- `{language}` each of the game's language(s) (e.g. 
`EN`, `ES`, `JA`) +- `{type}` the game's "type," one of: `Aftermarket`, `Alpha`, `Bad`, `Beta`, `BIOS`, `Demo`, `Device`, `Fixed`, `Hacked`, `Homebrew`, `Overdump`, `Pending Dump`, `Pirated`, `Prototype`, `Retail` (most games will be this), `Sample`, `Test`, `Trained`, `Translated`, `Unlicensed` +- `{genre}` the game's "genre" (most DATs don't provide this) ## File information @@ -80,7 +76,7 @@ You can use some information about the input and output file's name & location: ## Specific hardware -To help sort ROMs into unique file structures for popular frontends & hardware, `igir` offers a few specific tokens: +To help sort ROMs into unique file structures for popular frontends & hardware, Igir offers a few specific tokens: - `{adam}` the ['Adam' image](../usage/handheld/adam.md) emulator's directory for the ROM - `{batocera}` the [Batocera](../usage/desktop/batocera.md) emulator's directory for the ROM diff --git a/docs/output/writing-archives.md b/docs/output/writing-archives.md index 742a5e7f8..570288750 100644 --- a/docs/output/writing-archives.md +++ b/docs/output/writing-archives.md @@ -1,12 +1,12 @@ # Writing Zip Archives -`igir` supports creating `.zip` archives with the `igir zip` [command](../commands.md). +Igir supports creating `.zip` archives with the `igir zip` [command](../commands.md). !!! note - It is intentional that `igir` only supports `.zip` archives right now. + It is intentional that Igir only supports `.zip` archives right now. - `.zip` archives store CRC32 information in their "central directory" which helps drastically speed up `igir`'s file scanning, and they are easy to create without proprietary tools (e.g. 7-Zip, Rar). + `.zip` archives store CRC32 information in their "central directory" which helps drastically speed up Igir's file scanning, and they are easy to create without proprietary tools (e.g. 7-Zip, Rar). 
See the [reading archives](../input/reading-archives.md) page for more information on archive formats and their capabilities. diff --git a/docs/rom-dumping.md b/docs/rom-dumping.md index e73bd4b1c..14ed87166 100644 --- a/docs/rom-dumping.md +++ b/docs/rom-dumping.md @@ -10,7 +10,7 @@ [Dumping.Guide](https://dumping.guide/start) and [Emulation General Wiki](https://emulation.gametechwiki.com/index.php/Ripping_games) are some of the best resources for legally creating ROM files from games you own. -Here is a condensed version that isn't guaranteed to be up-to-date. +Here is a condensed version that isn't guaranteed to be up to date. ## Generation 1-5 cartridge-based consoles diff --git a/docs/roms/filtering-preferences.md b/docs/roms/filtering-preferences.md index 63dd40c33..2d8c0a62f 100644 --- a/docs/roms/filtering-preferences.md +++ b/docs/roms/filtering-preferences.md @@ -1,6 +1,6 @@ # ROM Filtering & Preferences -`igir` offers many options for filtering as well as 1G1R preferences/priorities (when combined with the `--single` option). +Igir offers many options for filtering as well as 1G1R preferences/priorities (when combined with the `--single` option). ROM filters cut down the list of games desired for a set, and any games filtered out will not appear in [reports](../output/reporting.md). ROM preferences decide what duplicates to eliminate (1G1R). @@ -14,7 +14,7 @@ Multiple filter options can be specified at once. --filter-regex , --filter-regex-exclude ``` -Only include, or exclude games based if their DAT name (or filename if not using DATs) matches a regular expression. +Only include or exclude games based on whether their DAT name (or filename if not using DATs) matches a regular expression. Regex flags can be optionally provided in the form `//`, for example: @@ -50,9 +50,9 @@ Wario Land II (USA, Europe) (SGB Enhanced) Languages are two-letter codes, and multiple languages can be specified with commas between them. 
See the `--help` message for the full list of understood languages. -If a game does not have language information specified, it will be inferred from the region. +If a game doesnโ€™t have language information specified, it will be inferred from the region. -Here are some example game names that `igir` can parse languages from, including ones with multiple languages: +Here are some example game names that Igir can parse languages from, including ones with multiple languages: ```text English: @@ -82,7 +82,7 @@ A game can have many languages, and all of them are considered during filtering. Regions are two or three-letter codes, and you can specify multiple regions with commas between them. See the `--help` message for the full list of understood regions. -Here are some example game names that `igir` can parse regions from: +Here are some example game names that Igir can parse regions from: ```text USA: @@ -129,7 +129,7 @@ Filter out, or only include games that are marked `bios="yes"` in the DAT, or co --no-device, --only-device ``` -Filter out, or only include [MAME devices](https://wiki.mamedev.org/index.php/MAME_Device_Basics). MAME devices typically represent physical devices, such as microcontrollers, video display controllers, sounds boards, and more. Many MAME devices don't have any associated ROM files. +Filter out or only include [MAME devices](https://wiki.mamedev.org/index.php/MAME_Device_Basics). MAME devices typically represent physical devices, such as microcontrollers, video display controllers, sounds boards, and more. Many MAME devices don't have any associated ROM files. ### Unlicensed @@ -245,7 +245,7 @@ Perfect Dark (USA) (2000-03-22) (Debug) --no-demo, --only-demo ``` -Filter out, or only include games that contain one of the following in their name: +Filter out or only include games that contain one of the following in their name: - `(Demo[a-z0-9. 
-]*)` (regex) - `@barai` @@ -310,7 +310,7 @@ Sword of Hope, The (Europe) (Proto) --no-program, --only-program ``` -Filter out, or only include games that contain one of the following in their name +Filter out or only include games that contain one of the following in their name - `([a-z0-9. ]*Program)` (regex) - `Check Program` @@ -450,7 +450,7 @@ See the [bad dumps](#bad-dumps) section for more information about "good" and "b Prefer games of certain languages over those in other languages. Multiple languages can be specified, in priority order, with commas between them. See the `--help` message for the full list of understood languages. -If a game does not have language information specified, it will be inferred from the region. +If a game doesnโ€™t have language information specified, it will be inferred from the region. For example, to prefer games in English and _then_ Japanese, the command would be: @@ -475,10 +475,10 @@ For example, to prefer games from: USA (highest priority), "world," and then Eur ### Prefer revision ```text ---prefer-revision-newer, --prefer-revision-older +--prefer-revision ``` -Prefer newer or older revisions of a game. +Prefer newer or older revisions, versions, or ring codes of a game. Revisions can be numeric: @@ -496,27 +496,30 @@ MSR - Metropolis Street Racer (Europe) (En,Fr,De,Es) (Rev A) MSR - Metropolis Street Racer (Europe) (En,Fr,De,Es) (Rev B) ``` -### Prefer retail +Versions can be semantic: ```text ---prefer-retail +F1 World Grand Prix for Dreamcast v1.011 (1999)(Video System)(JP)(en)[!] +F1 World Grand Prix for Dreamcast v1.000 (1999)(Video System)(PAL)(M4)[!] +F1 World Grand Prix v1.006 (2000)(Video System)(US)(M4)[!] ``` -Prefer games that are considered "retail" releases over those that aren't. +Ring codes can be numeric: -See the [only retail](#only-retail) section for more information on what games are considered "retail." 
+```text +Sonic CD (USA) (RE125) +Sonic CD (USA) (RE125) (Alt) +``` -### Prefer NTSC, PAL +### Prefer retail ```text ---prefer-ntsc, --prefer-pal +--prefer-retail ``` -Prefer games that are explicitly labeled as NTSC or PAL, over those that aren't. - -!!! note +Prefer games that are considered "retail" releases over those that aren't. - Most DAT groups do not label games with this information, generally games are labeled by region instead. +See the [only retail](#only-retail) section for more information on what games are considered "retail." ### Prefer parent diff --git a/docs/roms/headers.md b/docs/roms/headers.md index 1ac75da65..b5dc7b5a9 100644 --- a/docs/roms/headers.md +++ b/docs/roms/headers.md @@ -6,7 +6,7 @@ Some of these headers are used to tell the emulator information about how to emu ## Header detection -`igir` can detect headers for the following consoles and file extensions: +Igir can detect headers for the following consoles and file extensions: | Console | Header | Extension | |--------------------------------|---------------|-----------| @@ -16,13 +16,13 @@ Some of these headers are used to tell the emulator information about how to emu | Nintendo - Famicom Disk System | fsNES/FDS | `.fds` | | Nintendo - SNES | SMC | `.smc` | -Those file extensions above are the commonly accepted "correct" extensions and `igir` will attempt to detect if a header is present in those ROM files automatically. If for some reason your files don't have the right extension (e.g. `.rom`) you can force header detection with the `--header` glob option: +Those file extensions above are the commonly accepted "correct" extensions, and Igir will attempt to detect if a header is present in those ROM files automatically. If for some reason your files don't have the right extension (e.g. `.rom`) you can force header detection with the `--header` glob option: ```shell igir [commands..] 
--dat --input --header "*.rom" ``` -`igir` will use this detected header information to compute both "headered" and "headerless" checksums of ROMs and use both of those to match against DAT files. +Igir will use this detected header information to compute both "headered" and "headerless" checksums of ROMs and use both of those to match against DAT files. !!! warning @@ -30,7 +30,7 @@ igir [commands..] --dat --input --header "*.rom" ## Manual header removal -Some emulators cannot parse ROMs with headers and instead need a "headerless" version. This seems to be most common with SNES. Sometimes "headerless" files will have a different file extension: +Some emulators cannot parse ROMs with headers and instead need a "headerless" version. This seems most common with SNES. Sometimes "headerless" files will have a different file extension: | Console | Header | Headered
Extension | Headerless
Extension | |--------------------------------|---------------|------------------------|--------------------------| @@ -40,7 +40,7 @@ Some emulators cannot parse ROMs with headers and instead need a "headerless" ve | Nintendo - Famicom Disk System | fsNES/FDS | `.fds` | N/A | | Nintendo - SNES | SMC | `.smc` | `.sfc` | -For every console that `igir` can understand the headers for, it can also remove them with the `--remove-headers` option. This only makes sense for the consoles above with different "headerless" extensions, so you have to specify the extensions like this: +For every console that Igir can understand the headers for, it can also remove them with the `--remove-headers` option. This only makes sense for the consoles above with different "headerless" extensions, so you have to specify the extensions like this: ```shell igir [commands..] --dat --input --remove-headers .lnx,.smc @@ -54,6 +54,6 @@ igir [commands..] --dat --input --remove-headers ## Automatic header removal -Some DAT groups such as No-Intro publish "headered" and "headerless" DATs for the same console, such as NES. `igir` will treat these DATs differently, it will automatically remove headers (if present) for "headerless" DATs, and leave the header intact for "headered" DATs (ignoring the `--remove-headers` option completely). +Some DAT groups such as No-Intro publish "headered" and "headerless" DATs for the same console, such as NES. Igir will treat these DATs differently, it will automatically remove headers (if present) for "headerless" DATs, and leave the header intact for "headered" DATs (ignoring the `--remove-headers` option completely). As explained above, you almost always want the "headered" version. It's only in very specific circumstances that you might need the "headerless" version. 
diff --git a/docs/roms/matching.md b/docs/roms/matching.md index f2f5d69b0..6a8445d91 100644 --- a/docs/roms/matching.md +++ b/docs/roms/matching.md @@ -1,8 +1,8 @@ # ROM Matching -When `igir` [scans ROM files](../input/file-scanning.md) in the input directory, it calculates a number of checksums to uniquely identify each file. These checksums are then matched to ones found in [DATs](../dats/introduction.md). +When Igir [scans ROM files](../input/file-scanning.md) in the input directory, it calculates a number of checksums to uniquely identify each file. These checksums are then matched to ones found in [DATs](../dats/introduction.md). -By default, `igir` will use CRC32 + filesize to match input files to ROMs found in DATs. CRC32 checksums are fast to calculate, and many [archive formats](../input/reading-archives.md) include them in their directory of files, which greatly speeds up scanning. +By default, Igir will use CRC32 + filesize to match input files to ROMs found in DATs. CRC32 checksums are fast to calculate, and many [archive formats](../input/reading-archives.md) include them in their directory of files, which greatly speeds up scanning. !!! note @@ -10,13 +10,15 @@ By default, `igir` will use CRC32 + filesize to match input files to ROMs found ## Automatically using other checksum algorithms -Some DAT release groups do not include every checksum for every file. For example, MAME CHDs only include SHA1 checksums and nothing else, not even filesize information. +Some DAT release groups do not include every checksum for every file. For example, CHDs in MAME DATs only include SHA1 checksums and nothing else, not even filesize information. And some DAT release groups do not include filesize information for every file, preventing a safe use of CRC32. 
For example, not every [Hardware Target Game Database SMDB](https://github.com/frederic-mahe/Hardware-Target-Game-Database/tree/master/EverDrive%20Pack%20SMDBs) includes file sizes, but they typically include all the normal checksums. -!!! success +!!! warning - For situations like these, `igir` will automatically detect what combination of checksums it needs to calculate for input files to be able to match them to DATs. This has the chance of greatly slowing down file scanning, especially with archives. + For situations like these, Igir will automatically detect what combination of checksums it needs to calculate for input files to be able to match them to DATs. This _does_ have the chance of greatly slowing down file scanning, especially with archives. + + To constrain what checksums are calculated, you can use the `--input-checksum-quick` option (below), or `--input-checksum-max ` which accepts the same algorithm options as `--input-checksum-min ` (also below). For example, if you provide all of these DATs at once with the [`--dat ` option](../dats/processing.md): @@ -24,11 +26,25 @@ For example, if you provide all of these DATs at once with the [`--dat ` o - Hardware Target Game Database's Atari Lynx SMBD (which includes CRC32, MD5, SHA1, and SHA256 information but _not_ filesize) - MAME ListXML (which only includes SHA1 information for CHD "disks") -...then `igir` will determine that SHA1 is necessary to calculate because not every ROM in every DAT includes CRC32 _and_ filesize information. +...then Igir will determine that SHA1 is the minimum necessary checksum to calculate because not every ROM in every DAT includes CRC32 _and_ filesize information. !!! note - When generating a [dir2dat](../dats/dir2dat.md) with the `igir dir2dat` command, `igir` will calculate CRC32, MD5, and SHA1 information for every file. This helps ensure that the generated DAT has the most complete information it can. 
You can additionally add SHA256 information with the option `igir [commands..] [options] --input-min-checksum SHA256` (below). + When generating a [dir2dat](../dats/dir2dat.md) with the `igir dir2dat` command, Igir will calculate CRC32, MD5, and SHA1 information for every file. This helps ensure that the generated DAT has the most complete information it can. You can additionally add SHA256 information with the option `igir [commands..] [options] --input-checksum-min SHA256` (below). + +## Quick scanning files + +A number of archive formats require the extraction of files to calculate their checksums, and this extraction can greatly increase scanning time and add hard drive wear & tear. Igir's default settings will give you the best chance of matching input files to DATs, but there may be situations where you want to make scanning faster. + +The `--input-checksum-quick` option will prevent the extraction of archives (either in memory _or_ using temporary files) to calculate checksums of files contained inside. This means that Igir will rely solely on the information available in the archive's file directory. Non-archive files will still have their checksum calculated as normal. See the [archive formats](../input/reading-archives.md) page for more information about what file types contain what checksum information. + +!!! warning + + If an archive format doesn't contain any checksum information (e.g. `.cso`, `.tar.gz`), then there will be no way to match those input files to DATs when quick scanning! Only use quick scanning when all input archives store checksums of their files! + +!!! warning + + Different DAT groups catalog CHDs of CD-ROMs (`.bin` & `.cue`) and GD-ROMs (`.gdi` & `.bin`/`.raw`) that use a track sheet plus one or more track files differently. 
Take the Sega Dreamcast for example, Redump catalogs `.bin` & `.cue` files (which is [problematic with CHDs](https://github.com/mamedev/mame/issues/11903)), [MAME Redump](https://github.com/MetalSlug/MAMERedump) catalogs `.chd` CD files, and TOSEC catalogs `.gdi` & `.bin`/`.raw` files. Quick scanning of CHDs means only the SHA1 stored in its header will be used for matching, which may or may not work depending on the DATs you use. ## Manually using other checksum algorithms @@ -36,17 +52,17 @@ For example, if you provide all of these DATs at once with the [`--dat ` o Most people do not need to calculate checksums above CRC32. CRC32 + filesize is sufficient to match ROMs and test written files in the gross majority of cases. The below information is for people that _truly_ know they need higher checksums. -You can specify higher checksum algorithms with the `--input-min-checksum ` option like this: +You can specify higher checksum algorithms with the `--input-checksum-min ` option like this: ```shell -igir [commands..] [options] --input-min-checksum MD5 -igir [commands..] [options] --input-min-checksum SHA1 -igir [commands..] [options] --input-min-checksum SHA256 +igir [commands..] [options] --input-checksum-min MD5 +igir [commands..] [options] --input-checksum-min SHA1 +igir [commands..] [options] --input-checksum-min SHA256 ``` -This option defines the _minimum_ checksum that will be used based on digest size (below). If not every ROM in every DAT provides the checksum you specify, `igir` may automatically calculate and match files based on a higher checksum (see above). +This option defines the _minimum_ checksum that will be used based on digest size (below). If not every ROM in every DAT provides the checksum you specify, Igir may automatically calculate and match files based on a higher checksum (see above), but never lower. -The reason you might want to do this is to have a higher confidence that found files _exactly_ match ROMs in DATs. 
Just keep in mind that explicitly enabling non-CRC32 checksums will _greatly_ slow down scanning of files within archives. +The reason you might want to do this is to have a higher confidence that found files _exactly_ match ROMs in DATs. Keep in mind that explicitly enabling non-CRC32 checksums will _greatly_ slow down scanning of files within archives (see "quick scanning" above). Here is a table that shows the keyspace for each checksum algorithm, where the higher number of bits reduces the chances of collisions: @@ -57,4 +73,4 @@ Here is a table that shows the keyspace for each checksum algorithm, where the h | SHA1 | 160 bits | 2^160 = 1.46 quindecillion | `666d29a15d92f62750dd665a06ce01fbd09eb98a` | | SHA256 | 256 bits | 2^256 = 115.79 quattuorvigintillion | `1934e26cf69aa49978baac893ad5a890af35bdfb2c7a9393745f14dc89459137` | -When files are [tested](../commands.md#test) after being written, `igir` will use the highest checksum available from the scanned file to check the written file. This lets you have equal confidence that a file was written correctly as well as matched correctly. +When files are [tested](../commands.md#test) after being written, Igir will use the highest checksum available from the scanned file to check the written file. This lets you have equal confidence that a file was written correctly as well as matched correctly. diff --git a/docs/roms/patching.md b/docs/roms/patching.md index c0cee1a6d..7f9f6376a 100644 --- a/docs/roms/patching.md +++ b/docs/roms/patching.md @@ -2,7 +2,7 @@ Patches contain a set of changes that can be applied to a file, turning that file into something different. Common examples for patching ROMs are: translating text to a different language but keeping game logic the same, and fan-made creations such as new levels for an existing game. -Games and their ROMs are protected under copyrights, so patches are used in order to not share copyrighted code online. 
A person needs the original ROM file plus a patch file in order to get the resulting patched ROM that will be played with an emulator. +Games and their ROMs are protected under copyrights, so patches are used to not share copyrighted code online. A person needs the original ROM file plus a patch file to get the resulting patched ROM that will be played with an emulator. ## Specifying patch files @@ -12,7 +12,7 @@ Patch files can be specified with the `--patch ` option. See the [file sca There are many, _many_ patch types that ROM hackers use to distribute their changes on the internet ([xkcd "Standards"](https://xkcd.com/927/)). Typically, a patch will only be distributed in one format, so gamers are entirely at the mercy of the ROM hacker's choice. -Not all patch types are created equal. Here are some tables of some existing formats, whether `igir` supports them, and what the patch supports. +Not all patch types are created equal. Here are some tables of some existing formats, whether Igir supports them, and what the patch supports. **Common patch types:** @@ -46,7 +46,7 @@ If you have a choice in patch format, choose one that contains CRC32 checksums i ## ROM checksums -`igir` needs to be able to know what source ROM each patch file applies to, and it does this using CRC32 checksums. +Igir needs to be able to know what source ROM each patch file applies to, and it does this using CRC32 checksums. A few patch formats include the source ROM's CRC32 checksum in the patch's file contents. This is the most accurate and therefore the best way to get source ROM information. `.bps` is a great example of an efficient and simple patch format that includes this information. 
diff --git a/docs/static b/docs/static new file mode 120000 index 000000000..4dab1644d --- /dev/null +++ b/docs/static @@ -0,0 +1 @@ +../static \ No newline at end of file diff --git a/docs/usage/arcade.md b/docs/usage/arcade.md index db1a56181..d5f4f76b9 100644 --- a/docs/usage/arcade.md +++ b/docs/usage/arcade.md @@ -31,7 +31,7 @@ Here is a chart of instructions for various setups: | [FinalBurn Neo](https://github.com/finalburnneo/FBNeo) | FinalBurn Neo doesn't provide an obvious way to find the correct DAT for each version. But it is likely that you are using FinalBurn Neo through a frontend, so use the above instructions. | N/A | | [FinalBurn Alpha](https://www.fbalpha.com/) | FinalBurn Alpha was forked into FinalBurn Neo, so you should use that if possible. Otherwise, hopefully your frontend's documentation has links to download the correct DAT. | N/A | -## ROM set types +## ROM set merge types There are three broadly accepted types of ROM sets, with one extra variation, resulting in four types. @@ -82,6 +82,12 @@ The ROM merge type can be specified with the `--merge-roms ` option: --merge-roms split ``` +## CHD disks + +As arcade machines got more complicated, their storage requirements grew beyond what ROM chips can handle cost effectively. Cabinets started embedding hard drives, optical drives, laser disc drives, and more. Because backup images of these media types can get large, the MAME developers created a new compression format called "compressed hunks of data" (CHD). + +MAME DATs catalog these "disks" separately from "ROMs," which lets users choose whether to care about them or not. Typically, games that require disks will not run without them, so Igir requires them for a game to be considered present/complete. You can use the `--exclude-disks` option to exclude disks and only process ROMs to save some space. 
+ ## Example: building a new ROM set Let's say we want to build an arcade ROM set that's compatible with the most recent version of [RetroArch](desktop/retroarch.md). The steps would look like this: @@ -106,7 +112,7 @@ Let's say we want to build an arcade ROM set that's compatible with the most rec Let's say we care first and foremost that the arcade games "just work," and then we would like to conserve disk space. A "split" ROM set makes a good choice because RetroArch should be able to automatically index every game, including both parents and clones. -6. **Run `igir`.** +6. **Run Igir.** !!! note @@ -160,11 +166,11 @@ Most other ROM managers use the terms "re-build" & "fix" when talking about taki !!! note - A game's required ROM files may change between emulator versions. This usually occurs when bad ROM dumps are replaced with better dumps. `igir` cannot magically deal with these ROM differences, and `igir` will only write complete ROM sets, so you may see games disappear when re-building. You will need to source the differing ROM files in order to keep your full game set. + A game's required ROM files may change between emulator versions. This usually occurs when bad ROM dumps are replaced with better dumps. Igir cannot magically deal with these ROM differences, and Igir will only write complete ROM sets, so you may see games disappear when re-building. You will need to source the differing ROM files in order to keep your full game set. -A major reason `igir` was created was to help disambiguate what it means to build & re-build ROM sets. `igir` explicitly requires users to choose whether ROM files are copied or moved, so that users know what decision they are making. To "re-build" a ROM set, a user just needs to `igir move` ROMs from an input directory to the same directory specified again as the output. +A major reason Igir was created was to help disambiguate what it means to build & re-build ROM sets. 
Igir explicitly requires users to choose whether ROM files are copied or moved, so that users know what decision they're making. To "re-build" a ROM set, a user just needs to `igir move` ROMs from an input directory to the same directory specified again as the output. -Taking the MAME v0.258 set we created above, let's say we want to "downgrade" it to MAME 2003 (v0.78) because an under-powered device requires it. The steps would look like this: +Taking the MAME v0.258 set we created above, let's say we want to "downgrade" it to MAME 2003 (v0.78) because an underpowered device requires it. The steps would look like this: 1. **Locate or download the emulator version's DAT.** @@ -176,7 +182,7 @@ Taking the MAME v0.258 set we created above, let's say we want to "downgrade" it This is left as an exercise for the reader. -3. **Run `igir`.** +3. **Run Igir.** === ":simple-windowsxp: Windows (64-bit)" diff --git a/docs/usage/collection-sorting.md b/docs/usage/basic.md similarity index 53% rename from docs/usage/collection-sorting.md rename to docs/usage/basic.md index 39207cd95..e0593648d 100644 --- a/docs/usage/collection-sorting.md +++ b/docs/usage/basic.md @@ -1,4 +1,4 @@ -# Example Collection Sorting +# Basic Usage Examples A walkthrough of an example way to sort your ROM collection. @@ -6,11 +6,15 @@ A walkthrough of an example way to sort your ROM collection. See the `igir --help` message for a few common examples. -## First time collection sort +## With DATs + +Even though Igir can work without [DATs](../dats/introduction.md), using DATs to sort your collection is the [best practice](best-practices.md) to end up with the most accurate and organized set of ROMs. + +### First time collection sort First, you need to download a set of [DATs](../dats/introduction.md). For these examples I'll assume you downloaded a No-Intro daily P/C XML `.zip`. 
-Let's say that you have a directory named `ROMs/` that contains ROMs for many different systems, and it needs some organization. To make sure we're alright with the output, we'll have `igir` copy these files rather than move them. We'll also zip them to reduce disk space & speed up future scans. +Let's say that you have a directory named `ROMs/` that contains ROMs for many different systems, and it needs some organization. To make sure we're alright with the output, we'll have Igir copy these files to a different directory rather than move them. We'll also [zip](../output/writing-archives.md) them to reduce disk space & speed up future scans. === ":simple-windowsxp: Windows" @@ -42,12 +46,12 @@ Let's say that you have a directory named `ROMs/` that contains ROMs for many di --dir-dat-name ``` -This will organize your ROMs into system-specific subdirectories within `ROMs-Sorted/` and name all of your ROMs accurately. Because we copied and didn't move, no files were deleted from the `ROMs/` input directory. +This will organize your ROMs into system-specific subdirectories within `ROMs-Sorted/` and name all of your ROMs according to the No-Intro DATs. Because we copied and didn't move the files, no files were deleted from the `ROMs/` input directory. `ROMs-Sorted/` then might look something like this: ```text -ROMs-Sorted +ROMs-Sorted/ โ”œโ”€โ”€ Nintendo - Game Boy โ”‚ โ”œโ”€โ”€ Pokemon - Blue Version (USA, Europe) (SGB Enhanced).zip โ”‚ โ””โ”€โ”€ Pokemon - Yellow Version - Special Pikachu Edition (USA, Europe) (CGB+SGB Enhanced).zip @@ -59,13 +63,17 @@ ROMs-Sorted โ””โ”€โ”€ Pokemon Pinball (USA, Australia) (Rumble Version) (SGB Enhanced) (GB Compatible).zip ``` -[![asciicast](https://asciinema.org/a/rOWJwgbbODaXuQeQY4B6uWc4i.svg)](https://asciinema.org/a/rOWJwgbbODaXuQeQY4B6uWc4i) + + +!!! info + + See the [output path options](../output/path-options.md) and [output path tokens](../output/tokens.md) pages for other ways that you can organize your collection. 
-## Subsequent collection sorts +### Subsequent collection sorts -Let's say that we've done the above first time sort and were happy with the results. We can now consider the `ROMs-Sorted/` directory to be our primary collection, every file in there has been matched to a DAT. +Let's say that we've done the above first time sort and were happy with the results. We can now consider the `ROMs-Sorted/` directory to be our "golden" or "primary" collection, as every file in there has been matched to a DAT. -Now we have new ROMs that we want to merge into our collection, and we want to generate a [report](../output/reporting.md) of what ROMs are still missing. We also want to delete any unknown files that may have made their way into our collection. +We now have new ROMs that we want to newly merge into our collection, and we want to generate a [report](../output/reporting.md) of what ROMs are still missing. We also want to "[clean](../output/cleaning.md)" or delete any unknown files that may have made their way into our collection. === ":simple-windowsxp: Windows" @@ -102,10 +110,16 @@ Now we have new ROMs that we want to merge into our collection, and we want to g Any new ROMs in `ROMs-New/` that we didn't already have in `ROMs-Sorted/` will be moved, and a report will be generated for us. +!!! note + + Note that we're using `ROMs-Sorted/` as both an input directory _and_ as the output directory. This is required to ensure the [`clean` command](../output/cleaning.md) doesn't delete "good" files already in the output directory! + + You can always use the [`--clean-dry-run` option](../output/cleaning.md#dry-run) to see what files would be deleted without actually deleting them. 
+ `ROMs-Sorted/` then might look something like this, with new ROMs added: ```text -ROMs-Sorted +ROMs-Sorted/ โ”œโ”€โ”€ Nintendo - Game Boy โ”‚ โ”œโ”€โ”€ Pokemon - Blue Version (USA, Europe) (SGB Enhanced).zip โ”‚ โ”œโ”€โ”€ Pokemon - Red Version (USA, Europe) (SGB Enhanced).zip @@ -119,13 +133,13 @@ ROMs-Sorted โ””โ”€โ”€ Pokemon Pinball (USA, Australia) (Rumble Version) (SGB Enhanced) (GB Compatible).zip ``` -[![asciicast](https://asciinema.org/a/PWAfBcvCikzJ7wObLcdFGtZbI.svg)](https://asciinema.org/a/PWAfBcvCikzJ7wObLcdFGtZbI) + -## Flash cart 1G1R +### Flash cart 1G1R Let's say we've done the above sorting we want to copy some ROMs from `ROMs-Sorted/` to a flash cart. -We would prefer having only one copy of every game (1G1R), so there is less to scroll through to find what we want, and because we have a preferred language. Our flash cart can't read `.zip` files, so we'll need to extract our ROMs during copying. +We would prefer having only one copy of every game ([1G1R](../roms/filtering-preferences.md#preferences-for-1g1r)), because we have a preferred language, and so there is less to scroll through to find what game we want. Our flash cart can't read `.zip` files, so we'll need to extract our ROMs during copying. === ":simple-windowsxp: Windows" @@ -185,8 +199,85 @@ Your flash cart might then look something like this: โ””โ”€โ”€ Pokemon - Yellow Version - Special Pikachu Edition (USA, Europe) (CGB+SGB Enhanced).gb ``` -[![asciicast](https://asciinema.org/a/K8ROFbX8c4NJfUue3lwbe7d8V.svg)](https://asciinema.org/a/K8ROFbX8c4NJfUue3lwbe7d8V) + !!! info See the [ROM filtering & preference](../roms/filtering-preferences.md) page for other ways that you can filter your collection. + +## Without DATs + +ROM organization is very opinion-based, and your opinion may not match that of DAT groups. To preserve your custom ROM sorting, you can skip providing any DATs by omitting the `--dat ` option. + +!!! 
note + + If your custom ROM sorting includes directories, you will want to provide the [`--dir-mirror` option](../output/path-options.md#mirror-the-input-subdirectory) to preserve the structure. + +### Extracting or zipping all ROMs + +It is possible to extract or zip your ROM files en masse without complicated Bash or Batch scripts, and you can do this without DATs because the root of the filename won't change. + +=== ":simple-windowsxp: Windows" + + ```batch + igir move extract test ^ + --input "ROMs\" ^ + --output "ROMs\" ^ + --dir-mirror + ``` + +=== ":simple-apple: macOS" + + ```shell + igir move extract test \ + --input "ROMs/" \ + --output "ROMs/" \ + --dir-mirror + ``` + +=== ":simple-linux: Linux" + + ```shell + igir move extract test \ + --input "ROMs/" \ + --output "ROMs/" \ + --dir-mirror + ``` + + + +### Fixing file extensions + +Igir is able to detect more than 50 ROM and archive file types and automatically correct file extensions when needed during writing. See the [writing options](../output/options.md#fixing-rom-extensions) page for more information. + +=== ":simple-windowsxp: Windows" + + ```batch + igir move extract test ^ + --input "ROMs\" ^ + --output "ROMs\" ^ + --dir-mirror ^ + --fix-extension always + ``` + +=== ":simple-apple: macOS" + + ```shell + igir move extract test \ + --input "ROMs/" \ + --output "ROMs/" \ + --dir-mirror \ + --fix-extension always + ``` + +=== ":simple-linux: Linux" + + ```shell + igir move extract test \ + --input "ROMs/" \ + --output "ROMs/" \ + --dir-mirror \ + --fix-extension always + ``` + + diff --git a/docs/usage/best-practices.md b/docs/usage/best-practices.md new file mode 100644 index 000000000..01591d4e9 --- /dev/null +++ b/docs/usage/best-practices.md @@ -0,0 +1,89 @@ +# Best Practices + +**Use an installation method that auto-updates.** + +Downloading bundled binaries from GitHub is the most difficult way to receive updates to Igir. 
See the [installation page](../installation.md) for options available to you. + +## DATs + +**Use DATs.** + +While [DATs](../dats/introduction.md) are optional, they allow you to organize your ROMs in a human-understandable manner while trimming out unknown files. Additional metadata provided by some DAT groups allows you [filter your ROM set](../roms/filtering-preferences.md) to only what you care about. + +**Choose DAT groups with parent/clone information.** + +[Parent/clone information](../dats/introduction.md#parentclone-pc-dats) lets you apply [1G1R preference rules](../roms/filtering-preferences.md). For example, prefer No-Intro's Game Boy DAT over TOSEC's, as TOSEC doesn't provide parent/clone information. + +**Use consistent versions across all devices.** + +DATs work best if you store them alongside your primary ROM collection, and when you use the same DAT versions across all devices (i.e. your primary collection, handhelds, flash carts, etc.). Some DAT groups release new versions as often as daily, so keeping your collection in sync is easier with consistent DATs. + +**Process DATs from different groups separately.** + +DAT groups have some overlap between them, so using DATs from multiple groups at the same time may cause duplicate files or filename collisions. Different groups also have different conventions that may require different settings, such as [filters](../roms/filtering-preferences.md#filters) and [1G1R preferences](../roms/filtering-preferences.md#preferences-for-1g1r). + +Also, keep ROM sets organized by DATs from different groups in separate directories. For example, create different directories for No-Intro, Redump, and TOSEC-organized ROM sets. + +## File Inputs + +**Keep one primary collection and then copy to other sub-collections.** + +Provide your output directory as one of the input directories, and then any other input directories you wish to copy or move into your primary collection. 
Doing so will let you [clean the output directory](../output/cleaning.md) safely. + +Then, create sub-collections by copying files from your main collection to other devices, optionally applying [filtering and preference rules](../roms/filtering-preferences.md). + +**Prefer ROMs with headers.** + +Igir can [remove headers automatically](../roms/headers.md#automatic-header-removal) when needed, but it cannot add them back. Keep ROMs with headers in your primary collection and then modify them when copying to other devices as needed. + +**Don't use quick scanning unless you absolutely need it.** + +The default settings for Igir will have the best chance for you to match input files to DATs. Using the [`--input-checksum-quick` option](../roms/matching.md#quick-scanning-files) will reduce those chances. + +**Don't increase the minimum checksum level unless you absolutely need it.** + +The default settings for Igir will cause accurate file matching for the gross majority of cases with the least amount of processing. Additionally, most [archive formats](../input/reading-archives.md) only store CRC32 checksums, so forcing any others will greatly increase scanning time. Use the `--input-checksum-min ` option with caution. + +## File Outputs + +**Zip ROMs wherever possible.** + +Zip files generally save file space and are faster to scan, at the expense of more time to create them. For collections that will be read from more often than written to, such as a primary collection, prefer to eat the cost of [archiving files](../output/writing-archives.md) once with the `igir zip` command. + +**Organize ROM sets by DAT name or description.** + +Ignoring [arcade ROM sets](../usage/arcade.md), one purpose of sorting your ROM collection using DATs is to organize them in some human-understandable manner. 
A common way to help with this is to group ROMs from the same console together using [`--dir-dat-name`](../output/path-options.md#append-dat-name) or [`--dir-dat-description`](../output/path-options.md#append-dat-description). + +Alternatively, you can [filter to only the DATs](../dats/processing.md#dat-filtering) you want, and then [combine them together](../dats/processing.md#dat-combining) and write the resulting ROMs to one directory. + +**Organize ROMs by letter for non-keyboard & mouse devices.** + +Devices that only have a D-pad to browse through files can make ROM selection tedious. Use the [`--dir-letter` option](../output/path-options.md#append-game-letters) and its `--dir-letter-*` modifier options to make this easier with large collections. + +**Use the default game name appending option.** + +Igir will automatically group games with multiple ROMs together into their own subfolder. Leave this [`--dir-game-subdir ` option](../output/path-options.md#append-the-game-name) as the default unless you know what you're doing. + +**Overwrite invalid files.** + +If you value keeping a clean and accurate ROM collection, use the [`--overwrite-invalid` option](../output/options.md) to overwrite files in the output directory that don't match what's expected with a "valid" file. + +## Arcade + +**Use the right DAT version for your emulator version.** + +You must choose the right DAT for your emulator (e.g. MAME) and emulator version (e.g. MAME 0.258) or your ROMs may not work correctly. See the [arcade ROM sets page](../usage/arcade.md#emulator-versions--dats) for more information. + +**For MAME, use the official DATs or ones from progetto-SNAPS.** + +These DATs provide the most flexibility (i.e. can use any merge type) and the most amount of metadata (i.e. [parent/clone information](../dats/introduction.md#parentclone-pc-dats), ROMs and CHDs together in one DAT) for Igir to use for processing. 
Other DAT groups such as pleasuredome modify the official DATs quite heavily by pre-applying filters. + +**Pick a ROM merge type intentionally.** + +Igir will produce full non-merged sets by default for the highest level of compatibility. However, you should understand the difference between the supported [merge types](../usage/arcade.md#rom-set-merge-types) and choose one that best suits your needs. + +## Advanced + +**Use an SSD or a RAM drive for the temp directory.** + +Igir sometimes needs to write files to a [temporary directory](../advanced/temp-dir.md), such as when extracting archives that it [can't read natively](../input/reading-archives.md). Using a fast hard drive for this directory can speed up processing. diff --git a/docs/usage/console/gamecube.md b/docs/usage/console/gamecube.md index 7f5ae90c2..c2f5d3fbe 100644 --- a/docs/usage/console/gamecube.md +++ b/docs/usage/console/gamecube.md @@ -8,17 +8,17 @@ Swiss is sensitive to files being fragmented on SD cards ([swiss-gc#763](https://github.com/emukidid/swiss-gc/issues/763), [swiss-gc#122](https://github.com/emukidid/swiss-gc/issues/122), etc.). This means that you should only write one ISO at a time! -`igir` has a `--writer-threads` option to limit the number of files being written at once. You can use the option like this: +Igir has a `--writer-threads ` option to limit the number of files being written at once. 
You can use the option like this: === ":simple-windowsxp: Windows" Replace the `E:\` drive letter with wherever your SD card is: ```batch - igir copy extract test clean ^ + igir copy test clean ^ --dat "Redump*.zip" ^ --dat-name-regex '/gamecube/i' ^ - --input "ISOs" ^ + --input "Games" ^ --output "E:\Games" ^ --dir-letter ^ --writer-threads 1 @@ -29,10 +29,10 @@ Replace the `/Volumes/SD2SP2` drive name with whatever your SD card is named: ```shell - igir copy extract test clean \ + igir copy test clean \ --dat "Redump*.zip" \ --dat-name-regex '/gamecube/i' \ - --input "ISOs/" \ + --input "Games/" \ --output "/Volumes/SD2SP2/Games/" \ --dir-letter \ --writer-threads 1 @@ -43,11 +43,17 @@ Replace the `/media/SD2SP2` path with wherever your SD card is mounted: ```shell - igir copy extract test clean \ + igir copy test clean \ --dat "Redump*.zip" \ --dat-name-regex '/gamecube/i' \ - --input "ISOs/" \ + --input "Games/" \ --output "/media/SD2SP2/Games/" \ --dir-letter \ --writer-threads 1 ``` + +## NKit + +Swiss supports ISOs in the trimmed [NKit format](https://wiki.gbatemp.net/wiki/NKit), which can save significant space on your SD card. Some games such as Animal Crossing can be compressed as small as 28MB, while other games such as Wave Race: Blue Storm don't compress much at all. + +Igir can read the original ISO's CRC32 information stored in `.nkit.iso` files, which means it can match files to DATs (as long as you don't raise the minimum checksum level!). However, Igir can't extract NKit ISOs, you'll need to use Nanook's [NKit tool](https://wiki.gbatemp.net/wiki/NKit#Download) instead. diff --git a/docs/usage/console/ps2.md b/docs/usage/console/ps2.md index 465807c72..66df8d3b5 100644 --- a/docs/usage/console/ps2.md +++ b/docs/usage/console/ps2.md @@ -8,7 +8,7 @@ OPL is sensitive to files being fragmented on USB drives and SD cards (MX4SIO/SIO2SD). This means that you should only write one ISO at a time! 
-`igir` has a `--writer-threads` option to limit the number of files being written at once. You can use the option like this: +Igir has a `--writer-threads ` option to limit the number of files being written at once. You can use the option like this: === ":simple-windowsxp: Windows" diff --git a/docs/usage/desktop/batocera.md b/docs/usage/desktop/batocera.md index 837d9accc..eb8f5f8ee 100644 --- a/docs/usage/desktop/batocera.md +++ b/docs/usage/desktop/batocera.md @@ -19,7 +19,7 @@ Because Batocera uses RetroArch under the hood, the instructions are generally t ## ROMs -Batocera uses its own proprietary [ROM folder structure](https://wiki.batocera.org/systems), so `igir` has a replaceable `{batocera}` token to sort ROMs into the right place. See the [replaceable tokens page](../../output/tokens.md) for more information. +Batocera uses its own proprietary [ROM folder structure](https://wiki.batocera.org/systems), so Igir has a replaceable `{batocera}` token to sort ROMs into the right place. See the [replaceable tokens page](../../output/tokens.md) for more information. === ":simple-linux: Batocera (Linux)" diff --git a/docs/usage/desktop/emuelec.md b/docs/usage/desktop/emuelec.md index 73b335344..77efe28c8 100644 --- a/docs/usage/desktop/emuelec.md +++ b/docs/usage/desktop/emuelec.md @@ -21,4 +21,4 @@ Because EmuELEC is mostly Libretro under the hood, the instructions are generall !!! failure - EmuELEC uses its own proprietary [ROM folder structure](https://github.com/EmuELEC/EmuELEC/wiki/Supported-Platforms-And--Correct-Rom-Path). `igir` does not support this folder structure, yet. + EmuELEC uses its own proprietary [ROM folder structure](https://github.com/EmuELEC/EmuELEC/wiki/Supported-Platforms-And--Correct-Rom-Path). Igir does not support this folder structure, yet. 
diff --git a/docs/usage/desktop/emulationstation.md b/docs/usage/desktop/emulationstation.md index 74ef1f08c..4e664332f 100644 --- a/docs/usage/desktop/emulationstation.md +++ b/docs/usage/desktop/emulationstation.md @@ -23,7 +23,7 @@ Other emulators may use other names for their BIOS images but all reside in the ## ROMs -EmulationStation uses its own proprietary ROM folder structure, so `igir` has a replaceable `{es}` token to sort ROMs into the right place. See the [replaceable tokens page](../../output/tokens.md) for more information. +EmulationStation uses its own proprietary ROM folder structure, so Igir has a replaceable `{es}` token to sort ROMs into the right place. See the [replaceable tokens page](../../output/tokens.md) for more information. === ":simple-linux: EmulationStation (Linux)" diff --git a/docs/usage/desktop/launchbox.md b/docs/usage/desktop/launchbox.md index 6d3559c35..5e4c82dd4 100644 --- a/docs/usage/desktop/launchbox.md +++ b/docs/usage/desktop/launchbox.md @@ -6,4 +6,4 @@ LaunchBox uses [RetroArch](retroarch.md) for its game emulation by default, as o !!! failure - LaunchBox has its own ROM importing mechanism that copies files to `\Games\*\*` in your install directory (so `%USERPROFILE%\LaunchBox\Games\*\*` by default). There _is_ a mechanism to scan for ROMs added to these folders manually, but they must be sorted into the correct "platform" folder. LaunchBox doesn't have documentation cataloging these "platform" folders, so `igir` does not currently support them. + LaunchBox has its own ROM importing mechanism that copies files to `\Games\*\*` in your install directory (so `%USERPROFILE%\LaunchBox\Games\*\*` by default). There _is_ a mechanism to scan for ROMs added to these folders manually, but they must be sorted into the correct "platform" folder. LaunchBox doesn't have documentation cataloging these "platform" folders, so Igir does not currently support them. 
diff --git a/docs/usage/desktop/openemu.md b/docs/usage/desktop/openemu.md index 53772984e..329fa6790 100644 --- a/docs/usage/desktop/openemu.md +++ b/docs/usage/desktop/openemu.md @@ -4,4 +4,4 @@ !!! failure - OpenEmu has its own ROM importing mechanism that copies files to `~/Library/Application Support/OpenEmu/Game Library/roms` and adds them to a database. OpenEmu will _not_ automatically scan files you place into this folder, so `igir` is unable to help sort them. + OpenEmu has its own ROM importing mechanism that copies files to `~/Library/Application Support/OpenEmu/Game Library/roms` and adds them to a database. OpenEmu will _not_ automatically scan files you place into this folder, so Igir is unable to help sort them. diff --git a/docs/usage/desktop/retroarch.md b/docs/usage/desktop/retroarch.md index fbca65be1..cdbfeb1c1 100644 --- a/docs/usage/desktop/retroarch.md +++ b/docs/usage/desktop/retroarch.md @@ -10,7 +10,7 @@ First, RetroArch needs a number of [BIOS files](https://docs.libretro.com/library/bios/). Thankfully, the libretro team maintains a DAT of these "system" files, so we don't have to guess at the correct filenames. -With `igir`'s support for [DAT URLs](../../dats/processing.md#scanning-for-dats) we don't even have to download the DAT! Locate your "System/BIOS" directory as configured in the RetroArch UI and use it as your output directory: +With Igir's support for [DAT URLs](../../dats/processing.md#scanning-for-dats) we don't even have to download the DAT! 
Locate your "System/BIOS" directory as configured in the RetroArch UI and use it as your output directory: === ":simple-windowsxp: Windows (64-bit)" diff --git a/docs/usage/desktop/retrodeck.md b/docs/usage/desktop/retrodeck.md index 7d25d5b94..64ed47ff4 100644 --- a/docs/usage/desktop/retrodeck.md +++ b/docs/usage/desktop/retrodeck.md @@ -21,7 +21,7 @@ Other emulators may use other names for their BIOS images but all reside in the ## ROMs -RetroDECK uses its own proprietary ROM folder structure, so `igir` has a replaceable `{retrodeck}` token to sort ROMs into the right place. See the [replaceable tokens page](../../output/tokens.md) for more information. +RetroDECK uses its own proprietary ROM folder structure, so Igir has a replaceable `{retrodeck}` token to sort ROMs into the right place. See the [replaceable tokens page](../../output/tokens.md) for more information. === ":simple-linux: RetroDECK (Linux)" diff --git a/docs/usage/desktop/romm.md b/docs/usage/desktop/romm.md index af4fc47a0..80d22fdd3 100644 --- a/docs/usage/desktop/romm.md +++ b/docs/usage/desktop/romm.md @@ -4,7 +4,7 @@ ## ROMs -RomM uses its own [proprietary ROM folder structure](https://github.com/rommapp/romm/wiki/Supported-Platforms), so `igir` has a replaceable `{romm}` token to sort ROMs into the right place. See the [replaceable tokens page](../../output/tokens.md) for more information. +RomM uses its own [proprietary ROM folder structure](https://github.com/rommapp/romm/wiki/Supported-Platforms), so Igir has a replaceable `{romm}` token to sort ROMs into the right place. See the [replaceable tokens page](../../output/tokens.md) for more information. You can run RomM using [Docker Compose](https://docs.docker.com/compose/). 
Create a file named `docker-compose.yml` with the following contents, but change all of the environment variables with the value of `CHANGEME!`: diff --git a/docs/usage/handheld/adam.md b/docs/usage/handheld/adam.md index 7cc1d442d..6ccd0c788 100644 --- a/docs/usage/handheld/adam.md +++ b/docs/usage/handheld/adam.md @@ -53,11 +53,11 @@ The Adam image does not come with BIOS files. Where you have to put which of you ## ROMs -Adam supports many different ROM formats in subfolders of `ROMS` on the second SD card (TF2). An exhaustive list can be found in [their wiki](https://github.com/eduardofilo/RG350_adam_image/tree/master/data/local/home/.simplemenu/section_groups), where you can also find information about which ROMS are supported in compressed form. Most supported systems and their ROMS can be automatically sorted by `igir` using the `{adam}` output token. See the [replaceable tokens page](../../output/tokens.md) for more information. +Adam supports many different ROM formats in subfolders of `ROMS` on the second SD card (TF2). An exhaustive list can be found in [their wiki](https://github.com/eduardofilo/RG350_adam_image/tree/master/data/local/home/.simplemenu/section_groups), where you can also find information about which ROMS are supported in compressed form. Most supported systems and their ROMS can be automatically sorted by Igir using the `{adam}` output token. See the [replaceable tokens page](../../output/tokens.md) for more information. !!! tip - Please note that sorting the supported Arcade machine releases (MAME, CPS, FBA) in a single pass is not supported by `igir` at this time. Try the [Arcade docs](../arcade.md) docs for help with this. + Please note that sorting the supported Arcade machine releases (MAME, CPS, FBA) in a single pass is not supported by Igir at this time. Try the [Arcade docs](../arcade.md) for help with this. 
=== ":simple-windowsxp: Windows" diff --git a/docs/usage/handheld/funkeyos.md b/docs/usage/handheld/funkeyos.md index 02cef1539..12f04f321 100644 --- a/docs/usage/handheld/funkeyos.md +++ b/docs/usage/handheld/funkeyos.md @@ -17,7 +17,7 @@ To sum up the documentation, two files need to be copied: ## ROMs -Funkey OS uses its own proprietary [ROM folder structure](https://github.com/FunKey-Project/FunKey-OS/tree/master/FunKey/board/funkey/rootfs-overlay/usr/games/collections) based in the root of the SD card, so `igir` has a replaceable `{funkeyos}` token to sort ROMs into the right place. See the [replaceable tokens page](../../output/tokens.md) for more information. +Funkey OS uses its own proprietary [ROM folder structure](https://github.com/FunKey-Project/FunKey-OS/tree/master/FunKey/board/funkey/rootfs-overlay/usr/games/collections) based in the root of the SD card, so Igir has a replaceable `{funkeyos}` token to sort ROMs into the right place. See the [replaceable tokens page](../../output/tokens.md) for more information. === ":simple-windowsxp: Windows" diff --git a/docs/usage/handheld/jelos.md b/docs/usage/handheld/jelos.md index 26db832f9..36fde18e2 100644 --- a/docs/usage/handheld/jelos.md +++ b/docs/usage/handheld/jelos.md @@ -69,7 +69,7 @@ JELOS has its BIOS folder at `roms/bios/`, and it uses the RetroArch filenames. ## ROMs -JELOS supports many many systems and ROM formats. Check sections under the `Systems` menu in the [JELOS Wiki](https://jelos.org/) for more precise instructions about the indivudual systems. Most supported systems and their ROMS can be automatically sorted by `igir` using the `{jelos}` output token. See the [replaceable tokens page](../../output/tokens.md) for more information. +JELOS supports many many systems and ROM formats. Check sections under the `Systems` menu in the [JELOS Wiki](https://jelos.org/) for more precise instructions about the individual systems. 
Most supported systems and their ROMS can be automatically sorted by Igir using the `{jelos}` output token. See the [replaceable tokens page](../../output/tokens.md) for more information. === ":simple-windowsxp: Windows" diff --git a/docs/usage/handheld/minui.md b/docs/usage/handheld/minui.md index 5f2f9c1f6..4cbc21db3 100644 --- a/docs/usage/handheld/minui.md +++ b/docs/usage/handheld/minui.md @@ -25,9 +25,9 @@ Place these files under `/Bios//`: ## ROMs -MinUI supports many many systems and ROM formats. Check the folders [here (base)](https://github.com/shauninman/MinUI/tree/main/skeleton/BASE/Roms) and [here (extras)](https://github.com/shauninman/MinUI/tree/main/skeleton/EXTRAS/Roms) for a comprehensive list about the indivudual systems. Most supported systems and their ROMS can be automatically sorted by `igir` using the `{minui}` output token. See the [replaceable tokens page](../../output/tokens.md) for more information. +MinUI supports many many systems and ROM formats. Check the folders [here (base)](https://github.com/shauninman/MinUI/tree/main/skeleton/BASE/Roms) and [here (extras)](https://github.com/shauninman/MinUI/tree/main/skeleton/EXTRAS/Roms) for a comprehensive list about the individual systems. Most supported systems and their ROMS can be automatically sorted by Igir using the `{minui}` output token. See the [replaceable tokens page](../../output/tokens.md) for more information. -MinUI uses the names unter /Roms on the SD card in a more creative way than most other frontends. The folder names consist of a *UI name* and a *PAK name*. The *UI name* is used as the name shown in the User interface as a list item name, while the *PAK name* controls which software pack is used to open the files within. Files with the same *UI name* but different *PAK name* are listed in the same list in the UI but are opened with different PAKs. `igir` uses the vendor recommendations for the folder names with some exceptions. 
+MinUI uses the names under /Roms on the SD card in a more creative way than most other frontends. The folder names consist of a *UI name* and a *PAK name*. The *UI name* is used as the name shown in the User interface as a list item name, while the *PAK name* controls which software pack is used to open the files within. Files with the same *UI name* but different *PAK name* are listed in the same list in the UI but are opened with different PAKs. Igir uses the vendor recommendations for the folder names with some exceptions. MinUI requires multi-file releases to be grouped into subdirectories (bin/cue releases of the PS1 for example). It is recommended to use the [`--dir-game-subdir multiple` option](../../output/path-options.md), which is the default at this time. diff --git a/docs/usage/handheld/miyoocfw.md b/docs/usage/handheld/miyoocfw.md index 4749c36bf..28fc69854 100644 --- a/docs/usage/handheld/miyoocfw.md +++ b/docs/usage/handheld/miyoocfw.md @@ -25,7 +25,7 @@ MiyooCFW doesn't seem to have a centralized folder for putting BIOS files so it' ## ROMs -MiyooCFW supports many many systems and ROM formats. Check the table on the [MiyooCFW Wiki](https://github.com/TriForceX/MiyooCFW/wiki/Emulator-Info) for more precise instructions about the indivudual systems. Most supported systems and their ROMS can be automatically sorted by `igir` using the `{miyoocfw}` output token. See the [replaceable tokens page](../../output/tokens.md) for more information. +MiyooCFW supports many many systems and ROM formats. Check the table on the [MiyooCFW Wiki](https://github.com/TriForceX/MiyooCFW/wiki/Emulator-Info) for more precise instructions about the individual systems. Most supported systems and their ROMS can be automatically sorted by Igir using the `{miyoocfw}` output token. See the [replaceable tokens page](../../output/tokens.md) for more information. 
=== ":simple-windowsxp: Windows" diff --git a/docs/usage/handheld/onionos.md b/docs/usage/handheld/onionos.md index 29e289497..971b63e14 100644 --- a/docs/usage/handheld/onionos.md +++ b/docs/usage/handheld/onionos.md @@ -45,7 +45,7 @@ OnionOS has its BIOS folder at the root of the SD card at `/BIOS/`, and it uses ## ROMs -OnionOS uses its own proprietary [ROM folder structure](https://github.com/OnionUI/Onion/wiki/Emulators#rom-folders---quick-reference), so `igir` has a replaceable `{onion}` token to sort ROMs into the right place. See the [replaceable tokens page](../../output/tokens.md) for more information. +OnionOS uses its own proprietary [ROM folder structure](https://github.com/OnionUI/Onion/wiki/Emulators#rom-folders---quick-reference), so Igir has a replaceable `{onion}` token to sort ROMs into the right place. See the [replaceable tokens page](../../output/tokens.md) for more information. === ":simple-windowsxp: Windows" diff --git a/docs/usage/handheld/twmenu.md b/docs/usage/handheld/twmenu.md index a99fe148e..5c7cdaa81 100644 --- a/docs/usage/handheld/twmenu.md +++ b/docs/usage/handheld/twmenu.md @@ -12,7 +12,7 @@ TWiLightMenu++ ships with most emulators not needing BIOS files. No exceptions a ## ROMs -TWiLightMenu uses its own proprietary [ROM folder structure](https://github.com/DS-Homebrew/TWiLightMenu/tree/master/7zfile/roms) based in the root of the SD card, so `igir` has a replaceable `{twmenu}` token to sort ROMs into the right place. See the [replaceable tokens page](../../output/tokens.md) for more information. +TWiLightMenu uses its own proprietary [ROM folder structure](https://github.com/DS-Homebrew/TWiLightMenu/tree/master/7zfile/roms) based in the root of the SD card, so Igir has a replaceable `{twmenu}` token to sort ROMs into the right place. See the [replaceable tokens page](../../output/tokens.md) for more information. 
=== ":simple-windowsxp: Windows" diff --git a/docs/usage/hardware/analogue-pocket.md b/docs/usage/hardware/analogue-pocket.md index a2a652de7..78ab43e2e 100644 --- a/docs/usage/hardware/analogue-pocket.md +++ b/docs/usage/hardware/analogue-pocket.md @@ -14,7 +14,7 @@ Most Pocket updater utilities will download BIOS files required for each core fo ## ROMs -`igir` has support for replaceable "tokens" in the `--output ` option. This makes it easier to sort ROMs on devices that have an expected directory structure. The `{pocket}` token exists to help sort ROMs on the Analogue pocket. See the [replaceable tokens page](../../output/tokens.md) for more information. +Igir has support for replaceable "tokens" in the `--output ` option. This makes it easier to sort ROMs on devices that have an expected directory structure. The `{pocket}` token exists to help sort ROMs on the Analogue pocket. See the [replaceable tokens page](../../output/tokens.md) for more information. This token can be used to reference each core's specific directory in the SD card's `Assets` directory. ROMs go in the `Assets/{pocket}/common` directory. diff --git a/docs/usage/hardware/everdrive.md b/docs/usage/hardware/everdrive.md index 20b05f603..ef40babbb 100644 --- a/docs/usage/hardware/everdrive.md +++ b/docs/usage/hardware/everdrive.md @@ -4,7 +4,7 @@ The [EverDrive](https://krikzz.com/) flash carts by Krikzz are some of the highe ## ROMs -Because flash carts are specific to a specific console, you can provide specific input directories and [DATs](../../dats/introduction.md) when you run `igir`. For example: +Because flash carts are specific to a specific console, you can provide specific input directories and [DATs](../../dats/introduction.md) when you run Igir. 
For example: === ":simple-windowsxp: Windows" @@ -44,7 +44,7 @@ Because flash carts are specific to a specific console, you can provide specific you can then add some other output options such as the [`--dir-letter` option](../../output/path-options.md), if desired. -Alternatively, `igir` supports [Hardware Target Game Database SMDB files](https://github.com/frederic-mahe/Hardware-Target-Game-Database/tree/master/EverDrive%20Pack%20SMDBs) as [DATs](../../dats/introduction.md). Unlike typical DATs, Hardware Target Game Database SMDBs typically have an opinionated directory structure to help sort ROMs by language, category, genre, and more. Example usage: +Alternatively, Igir supports [Hardware Target Game Database SMDB files](https://github.com/frederic-mahe/Hardware-Target-Game-Database/tree/master/EverDrive%20Pack%20SMDBs) as [DATs](../../dats/introduction.md). Unlike typical DATs, Hardware Target Game Database SMDBs typically have an opinionated directory structure to help sort ROMs by language, category, genre, and more. Example usage: === ":simple-windowsxp: Windows" diff --git a/docs/usage/hardware/ezflash.md b/docs/usage/hardware/ezflash.md index 7c85bba41..cc1dea387 100644 --- a/docs/usage/hardware/ezflash.md +++ b/docs/usage/hardware/ezflash.md @@ -4,7 +4,7 @@ The [EZ-FLASH](https://www.ezflash.cn/) flash carts for Nintendo handhelds are a ## ROMs -Because flash carts are specific to a specific console, you can provide specific input directories & [DATs](../../dats/introduction.md) when you run `igir`. For example: +Because flash carts are specific to a specific console, you can provide specific input directories & [DATs](../../dats/introduction.md) when you run Igir. 
For example: === ":simple-windowsxp: Windows" diff --git a/docs/usage/hardware/mister.md b/docs/usage/hardware/mister.md index 5efa4a550..ecf18514c 100644 --- a/docs/usage/hardware/mister.md +++ b/docs/usage/hardware/mister.md @@ -8,7 +8,7 @@ The MiSTer [`update_all.sh`](https://github.com/theypsilon/Update_All_MiSTer) sc ## ROMs -`igir` has support for replaceable "tokens" in the `--output ` option. This makes it easier to sort ROMs on devices that have an expected directory structure. The `{mister}` token exists to help sort ROMs on the MiSTer. See the [replaceable tokens page](../../output/tokens.md) for more information. +Igir has support for replaceable "tokens" in the `--output ` option. This makes it easier to sort ROMs on devices that have an expected directory structure. The `{mister}` token exists to help sort ROMs on the MiSTer. See the [replaceable tokens page](../../output/tokens.md) for more information. This token can be used to reference each core's specific directory in the MiSTer's `games` directory. diff --git a/docs/usage/personal.md b/docs/usage/personal.md index aaa490db5..2ba9eda4b 100644 --- a/docs/usage/personal.md +++ b/docs/usage/personal.md @@ -1,6 +1,6 @@ -# Creator's Usage +# Maintainer's Usage Example -`igir` has many options available to fit almost any use case, but the number of options can be overwhelming. So that begs a question: _how do I, the creator of `igir`, use `igir` in the real world?_ +Igir has many options available to fit almost any use case, but the number of options can be overwhelming. 
So that begs a question: _how do I, the maintainer of Igir, use Igir in the real world?_ ## Primary ROM library @@ -58,22 +58,60 @@ for INPUT in "$@"; do INPUTS+=(--input "${INPUT}") done +# Cartridge-based consoles, 1st-5th generations npx --yes igir@latest move zip test clean report \ --dat "./No-Intro*.zip" \ - --dat-name-regex-exclude "/encrypted/i" \ + --dat-name-regex-exclude "/encrypted|source code/i" \ --input "./No-Intro/" \ - "${INPUTS[@]}" \ + "${INPUTS[@]:-}" \ + `# Trust checksums in archive headers, don't checksum archives (we only care about the contents)` \ + --input-checksum-max CRC32 \ + --input-checksum-archives never \ --patch "./Patches/" \ --output "./No-Intro/" \ --dir-dat-name \ - --overwrite-invalid + --overwrite-invalid \ + --zip-exclude "*.{chd,iso}" \ + --reader-threads 4 \ + -v -npx --yes igir@latest move zip test \ +# Disc-based consoles, 4th+ generations +npx --yes igir@latest move test clean report \ --dat "./Redump*.zip" \ + --dat-name-regex-exclude "/Dreamcast/i" \ --input "./Redump/" \ "${INPUTS[@]}" \ + `# Let maxcso calculate CSO CRC32s, don't checksum compressed discs (we only care about the contents)` \ + --input-checksum-max CRC32 \ + --input-checksum-archives never \ + --patch "./Patches/" \ --output "./Redump/" \ - --dir-dat-name + --dir-dat-name \ + --overwrite-invalid \ + --only-retail \ + --single \ + --prefer-language EN \ + --prefer-region USA,WORLD,EUR,JPN \ + --prefer-revision newer \ + -v + +# Dreamcast (because TOSEC catalogs chdman-compatible .gdi files and Redump catalogs .bin/.cue) +npx --yes igir@latest move test clean report \ + --dat "./TOSEC*.zip" \ + --dat-name-regex "/Dreamcast/i" \ + --dat-combine \ + --input "./TOSEC/" \ + "${INPUTS[@]}" \ + --input-checksum-archives never \ + --patch "./Patches/" \ + --output "./TOSEC/Sega Dreamcast" \ + --overwrite-invalid \ + --only-retail \ + --single \ + --prefer-language EN \ + --prefer-region USA,WORLD,EUR,JPN \ + --prefer-revision newer \ + -v npx --yes 
igir@latest move zip test clean \ `# Official MAME XML extracted from the progetto-SNAPS archive` \ @@ -82,10 +120,13 @@ npx --yes igir@latest move zip test clean \ --dat "./MAME*Rollback*.zip" \ --input "./MAME/" \ "${INPUTS[@]}" \ + --input-checksum-quick \ + --input-checksum-archives never \ --output "./MAME/" \ --dir-dat-name \ --overwrite-invalid \ - --merge-roms split + --merge-roms merged \ + -v ``` I then copy ROMs to other devices from this source of truth. @@ -111,23 +152,29 @@ SOURCE=/Volumes/WDPassport4 npx igir@latest copy extract test clean \ --dat "${SOURCE}/No-Intro*.zip" \ - --dat-name-regex-exclude "/headerless/i" \ + --dat-name-regex-exclude "/headerless|OSTs/i" \ --input "${SOURCE}/No-Intro/" \ + --input-exclude "${SOURCE}/No-Intro/Atari - 7800 (BIN)/" \ + --input-exclude "${SOURCE}/No-Intro/Commodore - Amiga*/**" \ + --input-exclude "${SOURCE}/No-Intro/Nintendo - Nintendo - Family Computer Disk System (QD)/" \ --input-exclude "${SOURCE}/No-Intro/Nintendo - Game Boy Advance (e-Reader)/" \ + --input-checksum-quick \ --patch "${SOURCE}/Patches/" \ --output "./Assets/{pocket}/common/" \ --dir-letter \ --dir-letter-limit 1000 \ `# Leave BIOS files alone` \ --clean-exclude "./Assets/*/common/*.*" \ + --clean-exclude "./Assets/*/common/Palettes/**" \ --overwrite-invalid \ --no-bios \ --no-bad \ --single \ --prefer-language EN \ --prefer-region USA,WORLD,EUR,JPN \ - --prefer-revision-newer \ - --prefer-retail + --prefer-revision newer \ + --prefer-retail \ + -v ``` That lets me create an EN+USA preferred 1G1R set for my Pocket on the fly, making sure I don't delete BIOS files needed for each core. 
@@ -144,16 +191,32 @@ I have this script `sd2sp2_pocket_sync.sh` at the root of my GameCube [SD2SP2](h #!/usr/bin/env bash set -euo pipefail +# shellcheck disable=SC2064 +trap "cd \"${PWD}\"" EXIT +cd "$(dirname "$0")" + + SOURCE=/Volumes/WDPassport4 -npx --yes igir@latest copy extract test clean \ +npx --yes igir@latest copy test clean report \ + --dat "${SOURCE}/Redump*.zip" \ + --dat-name-regex "/GameCube/i" \ --input "${SOURCE}/Redump/Nintendo - GameCube" \ - --output "./ISOs/" \ + --input-checksum-quick \ + --input-checksum-archives never \ + --patch "${SOURCE}/Patches" \ + --output "./Games/" \ --dir-letter \ + --overwrite-invalid \ + --filter-regex-exclude "/(Angler|Baseball|Basketball|Bass|Bonus Disc|Cabela|Disney|ESPN|F1|FIFA|Football|Golf|Madden|MLB|MLS|NASCAR|NBA|NCAA|NFL|NHL|Nickelodeon|Nick Jr|Nicktoons|PGA|Poker|Soccer|Tennis|Tonka|UFC|WWE)/i" \ --no-bios \ --only-retail \ - --filter-regex-exclude "/(Baseball|Cabela|F1|FIFA|Football|Golf|Madden|MLB|NASCAR|NBA|NCAA|NFL|NHL|PGA|Soccer|Tennis|UFC|WWE)/i" \ - --writer-threads 1 + --single \ + --prefer-language EN \ + --prefer-region USA,WORLD,EUR,JPN \ + --prefer-revision newer \ + --writer-threads 1 \ + -v ``` -It doesn't use DATs because I have the ISOs in a trimmed NKit format (see [Swiss](https://github.com/emukidid/swiss-gc)), so they won't match the checksums in DATs. I also exclude some games due to limited SD card size. +I use the trimmed [NKit format](https://wiki.gbatemp.net/wiki/NKit) for ISOs, which don't make sense to extract, so they're copied as-is. I also exclude some games due to limited SD card size. 
diff --git a/index.ts b/index.ts index b5284fce2..a12a06611 100644 --- a/index.ts +++ b/index.ts @@ -27,12 +27,11 @@ gracefulFs.gracefulify(realFs); process.exit(1); } - process.once('SIGINT', async () => { + process.once('SIGINT', () => { + ProgressBarCLI.stop(); logger.newLine(); logger.notice(`Exiting ${Package.NAME} early`); - await ProgressBarCLI.stop(); process.exit(0); - // TODO(cemmer): does exit here cause cleanup not to happen? }); // Parse CLI arguments @@ -67,9 +66,9 @@ gracefulFs.gracefulify(realFs); new UpdateChecker(logger).check(); await new Igir(options, logger).main(); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); } catch (error) { - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); if (error instanceof ExpectedError) { logger.error(error); } else if (error instanceof Error && error.stack) { diff --git a/jest.config.ts b/jest.config.ts index c4411386c..cfb729aa1 100644 --- a/jest.config.ts +++ b/jest.config.ts @@ -1,6 +1,26 @@ +import fs from 'node:fs'; +import path from 'node:path'; + import { JestConfigWithTsJest } from 'ts-jest'; +// Fix some bad package.json files that don't play well with ts-jest +[ + // https://github.com/g-plane/cue/issues/1 + '@gplane/cue', +].forEach((moduleName) => { + const modulePath = path.join('node_modules', moduleName); + const packagePath = path.join(modulePath, 'package.json'); + const packageJson = JSON.parse(fs.readFileSync(packagePath).toString()); + + packageJson.main = packageJson.main + ?? 
packageJson.exports['.'].import; + delete packageJson.exports; + + fs.writeFileSync(packagePath, JSON.stringify(packageJson, undefined, 2)); +}); + const jestConfig: JestConfigWithTsJest = { + preset: 'ts-jest', testEnvironment: 'node', setupFilesAfterEnv: ['jest-extended/all'], diff --git a/mkdocs.yml b/mkdocs.yml index 614c3c516..f67d59b3e 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -2,7 +2,7 @@ site_name: igir # https://github.com/mkdocs/mkdocs/issues/1783: site_url required for sitemap.xml site_url: https://igir.io site_author: Christian Emmer -# site_description: TODO +site_description: Igir is a zero-setup ROM collection manager that sorts, filters, extracts or archives, patches, and reports on collections of any size on any OS. repo_name: emmercm/igir repo_url: https://github.com/emmercm/igir @@ -11,8 +11,8 @@ edit_uri: edit/main/docs/ remote_branch: gh-pages theme: - logo: logo-light.svg - favicon: logo-dark.svg + logo: static/logo-light.svg + favicon: static/favicon.svg name: material palette: @@ -41,79 +41,82 @@ theme: nav: - Documentation: - - index.md - - Getting Started: - - overview.md - - installation.md - - commands.md - - alternatives.md - - Example Usage: - - usage/collection-sorting.md - - Emulator Frontends: - - usage/handheld/adam.md - - usage/desktop/batocera.md - - usage/desktop/emuelec.md - - usage/desktop/emulationstation.md - - usage/handheld/funkeyos.md - - usage/handheld/jelos.md - - usage/desktop/lakka.md - - usage/desktop/launchbox.md - - usage/handheld/minui.md - - usage/handheld/miyoocfw.md - - usage/handheld/onionos.md - - usage/desktop/openemu.md - - usage/desktop/recalbox.md - - usage/desktop/retroarch.md - - usage/desktop/retrodeck.md - - usage/desktop/retropie.md - - usage/desktop/romm.md - - usage/handheld/twmenu.md - - FPGA: - - usage/hardware/mister.md - - usage/hardware/analogue-pocket.md - - Flash Carts: - - usage/hardware/everdrive.md - - usage/hardware/ezflash.md - - Game Consoles: - - usage/console/gamecube.md - - 
usage/console/ps2.md - - usage/arcade.md - - usage/personal.md - - DATs: - - dats/introduction.md - - dats/processing.md - - dats/dir2dat.md - - dats/fixdats.md - - File Inputs: - - input/file-scanning.md - - input/reading-archives.md - - ROM Processing: - - roms/matching.md - - roms/filtering-preferences.md - - roms/headers.md - - roms/patching.md - - File Outputs: - - output/path-options.md - - output/tokens.md - - output/options.md - - output/writing-archives.md - - output/reporting.md - - output/cleaning.md - - Advanced: - - advanced/logging.md - - advanced/temp-dir.md - - advanced/troubleshooting.md - - advanced/internals.md - - Misc: - - rom-dumping.md - - Terms and Conditions: - - contributing.md - - license.md + - index.md + - Getting Started: + - introduction.md + - installation.md + - commands.md + - cli.md + - alternatives.md + - General Usage: + - usage/basic.md + - usage/personal.md + - usage/best-practices.md + - Hardware-Specific Usage: + - Emulator Frontends: + - usage/handheld/adam.md + - usage/desktop/batocera.md + - usage/desktop/emuelec.md + - usage/desktop/emulationstation.md + - usage/handheld/funkeyos.md + - usage/handheld/jelos.md + - usage/desktop/lakka.md + - usage/desktop/launchbox.md + - usage/handheld/minui.md + - usage/handheld/miyoocfw.md + - usage/handheld/onionos.md + - usage/desktop/openemu.md + - usage/desktop/recalbox.md + - usage/desktop/retroarch.md + - usage/desktop/retrodeck.md + - usage/desktop/retropie.md + - usage/desktop/romm.md + - usage/handheld/twmenu.md + - FPGA: + - usage/hardware/mister.md + - usage/hardware/analogue-pocket.md + - Flash Carts: + - usage/hardware/everdrive.md + - usage/hardware/ezflash.md + - Game Consoles: + - usage/console/gamecube.md + - usage/console/ps2.md + - usage/arcade.md + - DATs: + - dats/introduction.md + - dats/processing.md + - dats/dir2dat.md + - dats/fixdats.md + - File Inputs: + - input/file-scanning.md + - input/reading-archives.md + - ROM Processing: + - roms/matching.md + - 
roms/filtering-preferences.md + - roms/headers.md + - roms/patching.md + - File Outputs: + - output/path-options.md + - output/tokens.md + - output/options.md + - output/writing-archives.md + - output/reporting.md + - output/cleaning.md + - Advanced: + - advanced/logging.md + - advanced/temp-dir.md + - advanced/troubleshooting.md + - advanced/internals.md + - Misc: + - rom-dumping.md + - Terms and Conditions: + - contributing.md + - license.md # https://github.com/squidfunk/mkdocs-material/issues/889#issuecomment-582297142: how-to open nav links in new tabs - Download โ†—: https://github.com/emmercm/igir/releases/latest" target="_blank - - Donate โ†—: https://github.com/sponsors/emmercm" target="_blank - - Issues โ†—: https://github.com/emmercm/igir/issues?q=is%3Aopen+is%3Aissue+label%3Abug" target="_blank - Discuss โ†—: https://github.com/emmercm/igir/discussions" target="_blank + - Issues โ†—: https://github.com/emmercm/igir/issues?q=is%3Aopen+is%3Aissue+label%3Abug" target="_blank + - Donate โ†—: https://github.com/sponsors/emmercm" target="_blank plugins: - unused_files: @@ -134,15 +137,17 @@ plugins: 'archives.md': 'input/reading-archives.md' 'dats.md': 'dats/introduction.md' 'dats/overview.md': 'dats/introduction.md' - 'examples.md': 'usage/collection-sorting.md' + 'examples.md': 'usage/basic.md' 'input/archives.md': 'input/reading-archives.md' 'input/dats.md': 'dats/introduction.md' 'internals.md': 'advanced/internals.md' 'output/arcade.md': 'usage/arcade.md' + 'overview.md': 'introduction.md' 'reporting.md': 'output/reporting.md' 'rom-filtering.md': 'roms/filtering-preferences.md' 'rom-headers.md': 'roms/headers.md' 'rom-patching.md': 'roms/patching.md' + 'usage/collection-sorting.md': 'usage/basic.md' #- htmlproofer: # raise_error_excludes: # '-1': [ 'http://www.logiqx.com' ] diff --git a/package-lock.json b/package-lock.json index 0dce3dd32..c4f9d6ec4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,12 +12,14 @@ "dependencies": { 
"@fast-csv/format": "5.0.0", "@fast-csv/parse": "5.0.0", + "@gplane/cue": "0.2.0", "@node-rs/crc32": "1.10.3", "7zip-min": "1.4.5", "archiver": "7.0.1", "async": "3.2.6", "async-mutex": "0.5.0", "chalk": "5.3.0", + "chdman": "0.267.3", "class-transformer": "0.5.1", "cli-progress": "3.12.0", "fast-glob": "3.3.2", @@ -26,6 +28,7 @@ "graceful-fs": "4.2.11", "is-admin": "4.0.0", "junk": "4.0.1", + "maxcso": "0.1130.6", "micromatch": "4.0.8", "moment": "2.30.1", "node-disk-info": "1.3.0", @@ -903,6 +906,11 @@ "lodash.uniq": "^4.5.0" } }, + "node_modules/@gplane/cue": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@gplane/cue/-/cue-0.2.0.tgz", + "integrity": "sha512-X5OzPA/Y2NG6IJUbIXLiGSCev0L4AwbOLUoO+dhrLzw70Qcd9S5QeC0SDAjtZZ3jVxvrtO5mRgCSAwpQMHPmhg==" + }, "node_modules/@humanwhocodes/config-array": { "version": "0.11.14", "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", @@ -4015,6 +4023,23 @@ "node": ">=10" } }, + "node_modules/chdman": { + "version": "0.267.3", + "resolved": "https://registry.npmjs.org/chdman/-/chdman-0.267.3.tgz", + "integrity": "sha512-pLg59Xcc7ux4XCuXHO8gp7bNQIAL0dCHc0HebrddBdFzBRIzxbaDEyyIItMkoknKN372v95RWHe0plGnC8D+VQ==", + "dependencies": { + "which": "^4.0.0" + }, + "bin": { + "chdman": "dist/src/bin.js" + }, + "engines": { + "node": ">=16.6.0" + }, + "funding": { + "url": "https://github.com/sponsors/emmercm" + } + }, "node_modules/chownr": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", @@ -5676,7 +5701,6 @@ "url": "https://paypal.me/naturalintelligence" } ], - "license": "MIT", "dependencies": { "strnum": "^1.0.5" }, @@ -6875,7 +6899,6 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", - "dev": true, "license": "ISC", "engines": { "node": ">=16" @@ -9001,6 +9024,23 @@ "tmpl": "1.0.5" } 
}, + "node_modules/maxcso": { + "version": "0.1130.6", + "resolved": "https://registry.npmjs.org/maxcso/-/maxcso-0.1130.6.tgz", + "integrity": "sha512-Sv3dFgiJztpf8aUW29CqEkyHXYDfdvWhePp4+U7cimt3HI/7az4RxTkyR9A8BhIQdFeXu73kbZS9Qsw9Bf61Fw==", + "dependencies": { + "which": "^4.0.0" + }, + "bin": { + "maxcso": "dist/src/bin.js" + }, + "engines": { + "node": ">=16.6.0" + }, + "funding": { + "url": "https://github.com/sponsors/emmercm" + } + }, "node_modules/merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", @@ -11763,7 +11803,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz", "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==", - "dev": true, "license": "ISC", "dependencies": { "isexe": "^3.1.1" diff --git a/package.json b/package.json index 77025d36c..c134105bc 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "igir", "version": "2.11.0", - "description": "๐Ÿ•น A video game ROM collection manager to help filter, sort, patch, archive, and report on collections on any OS.", + "description": "๐Ÿ•น A zero-setup ROM collection manager that sorts, filters, extracts or archives, patches, and reports on collections of any size on any OS.", "keywords": [ "1g1r", "analogue-pocket", @@ -70,12 +70,14 @@ "dependencies": { "@fast-csv/format": "5.0.0", "@fast-csv/parse": "5.0.0", + "@gplane/cue": "0.2.0", "@node-rs/crc32": "1.10.3", "7zip-min": "1.4.5", "archiver": "7.0.1", "async": "3.2.6", "async-mutex": "0.5.0", "chalk": "5.3.0", + "chdman": "0.267.3", "class-transformer": "0.5.1", "cli-progress": "3.12.0", "fast-glob": "3.3.2", @@ -84,6 +86,7 @@ "graceful-fs": "4.2.11", "is-admin": "4.0.0", "junk": "4.0.1", + "maxcso": "0.1130.6", "micromatch": "4.0.8", "moment": "2.30.1", "node-disk-info": "1.3.0", diff --git a/package.ts b/package.ts index fb0d7699a..040078f64 100644 --- a/package.ts +++ 
b/package.ts @@ -86,6 +86,12 @@ const fileFilter = (filters: FileFilter[]): string[] => { // Only include the exact 7zip-bin we need { exclude: 'node_modules/{**/,}7zip-bin/**/7z*' }, { include: path7za }, + // Only include the exact chdman bin we need + { exclude: 'node_modules/{**/,}chdman/bin/*/*/chdman*' }, + { include: `node_modules/{**/,}chdman/bin/${process.platform}/${process.arch}/chdman*` }, + // Only include the exact maxcso bin we need + { exclude: 'node_modules/{**/,}maxcso/bin/*/*/maxcso*' }, + { include: `node_modules/{**/,}maxcso/bin/${process.platform}/${process.arch}/maxcso*` }, ])); const includeSize = (await Promise.all([...include].map(async (file) => { if (await FsPoly.isDirectory(file)) { @@ -131,7 +137,7 @@ const fileFilter = (filters: FileFilter[]): string[] => { proc.stdout.on('data', (chunk) => { procOutput += chunk.toString(); }); proc.stderr.on('data', (chunk) => { procOutput += chunk.toString(); }); await new Promise((resolve, reject) => { - proc.on('exit', resolve); + proc.on('close', resolve); proc.on('error', reject); }); logger.trace(procOutput); diff --git a/scripts/asciinema-rec.sh b/scripts/asciinema-rec.sh index b2b406bb7..de46443c4 100755 --- a/scripts/asciinema-rec.sh +++ b/scripts/asciinema-rec.sh @@ -22,22 +22,20 @@ if [[ "${1:-}" == "play" ]]; then # shellcheck disable=SC2317 npx() { shift # discard "igir@latest" - node ../dist/index.js "$@" --dat-name-regex-exclude "/encrypted|headerless/i" + node ../dist/index.js "$@" --dat-name-regex-exclude "/encrypted|headerless|3ds/i" --disable-cache } # shellcheck disable=SC2317 tree() { - command tree -N -I -- *.rsl* "$@" + command tree -N "$@" } # BEGIN PLAYBACK # ts-node ./index.ts copy zip clean -d demo/No-Intro*.zip -i GB/ -i NES/ -o demo/roms/ -D - pei "ls -gn" + pei 'tree -L 1 .' 
echo "" && sleep 2 - pei "unzip -l No-Intro*.zip | head -10" || true + pei 'npx igir@latest copy zip report --dat "No-Intro*.zip" --input ROMs/ --output ROMs-Sorted/ --dir-dat-name --only-retail' echo "" && sleep 2 - pei "npx igir@latest copy zip report --dat No-Intro*.zip --input roms/ --output roms-sorted/ --dir-dat-name --only-retail" - echo "" - pei "ls -gn roms-sorted/" + pei 'tree -L 1 ROMs-Sorted/' # END PLAYBACK exit 0 @@ -82,9 +80,8 @@ npm --version &> /dev/null || exit 1 npm run build # Clean any previous output -if [[ -d "${DEMO_DIR}/roms-sorted" ]]; then - rm -rf "${DEMO_DIR}/roms-sorted" -fi +rm -rf "${DEMO_DIR}/roms-sorted" +rm -rf "${DEMO_DIR}/*.csv" clear if [[ "${1:-}" == "rec" ]]; then diff --git a/src/console/logger.ts b/src/console/logger.ts index e03d6f4bb..521398449 100644 --- a/src/console/logger.ts +++ b/src/console/logger.ts @@ -164,11 +164,11 @@ export default class Logger { /** * Create a {@link ProgressBar} with a reference to this {@link Logger}. */ - async addProgressBar( + addProgressBar( name: string, symbol = ProgressBarSymbol.WAITING, initialTotal = 0, - ): Promise { + ): ProgressBar { return ProgressBarCLI.new(this, name, symbol, initialTotal); } diff --git a/src/console/progressBar.ts b/src/console/progressBar.ts index 4c0c374e6..69fbe079e 100644 --- a/src/console/progressBar.ts +++ b/src/console/progressBar.ts @@ -12,21 +12,23 @@ export const ProgressBarSymbol = { WAITING: chalk.grey(process.platform === 'win32' ? 'โ€ฆ' : 'โ‹ฏ'), DONE: chalk.green(process.platform === 'win32' ? 'โˆš' : 'โœ“'), // Files - SEARCHING: chalk.magenta(process.platform === 'win32' ? 'โ—‹' : 'โ†ป'), - DOWNLOADING: chalk.magenta('โ†“'), - PARSING_CONTENTS: chalk.magenta('ฮฃ'), - DETECTING_HEADERS: chalk.magenta('^'), - INDEXING: chalk.magenta('#'), + FILE_SCANNING: chalk.magenta(process.platform === 'win32' ? 
'โ—‹' : 'โ†ป'), + DAT_DOWNLOADING: chalk.magenta('โ†“'), + DAT_PARSING: chalk.magenta('ฮฃ'), + ROM_HASHING: chalk.magenta('#'), + ROM_HEADER_DETECTION: chalk.magenta('^'), + ROM_INDEXING: chalk.magenta('โ™ฆ'), // Processing a single DAT - GROUPING_SIMILAR: chalk.cyan('โˆฉ'), - MERGE_SPLIT: chalk.cyan('โ†”'), + DAT_GROUPING_SIMILAR: chalk.cyan('โˆฉ'), + DAT_MERGE_SPLIT: chalk.cyan('โ†”'), // Candidates - GENERATING: chalk.cyan('ฮฃ'), - FILTERING: chalk.cyan('โˆ†'), - EXTENSION_CORRECTION: chalk.cyan('.'), - HASHING: chalk.cyan('#'), - VALIDATING: chalk.cyan(process.platform === 'win32' ? '?' : 'โ‰Ÿ'), - COMBINING_ALL: chalk.cyan(process.platform === 'win32' ? 'U' : 'โˆช'), + CANDIDATE_GENERATING: chalk.cyan('ฮฃ'), + CANDIDATE_FILTERING: chalk.cyan('โˆ†'), + CANDIDATE_EXTENSION_CORRECTION: chalk.cyan('.'), + CANDIDATE_HASHING: chalk.yellow('#'), + CANDIDATE_VALIDATING: chalk.cyan(process.platform === 'win32' ? '?' : 'โ‰Ÿ'), + CANDIDATE_COMBINING: chalk.cyan(process.platform === 'win32' ? 'U' : 'โˆช'), + TESTING: chalk.yellow(process.platform === 'win32' ? '?' : 'โ‰Ÿ'), WRITING: chalk.yellow(process.platform === 'win32' ? 'ยป' : 'โœŽ'), RECYCLING: chalk.blue(process.platform === 'win32' ? 'ยป' : 'โ™ป'), DELETING: chalk.red(process.platform === 'win32' ? 'X' : 'โœ•'), @@ -37,31 +39,31 @@ export const ProgressBarSymbol = { * information about an operation. 
*/ export default abstract class ProgressBar { - abstract reset(total: number): Promise; + abstract reset(total: number): void; - abstract setName(name: string): Promise; + abstract setName(name: string): void; - abstract setSymbol(symbol: string): Promise; + abstract setSymbol(symbol: string): void; abstract addWaitingMessage(waitingMessage: string): void; abstract removeWaitingMessage(waitingMessage: string): void; - abstract incrementTotal(increment: number): Promise; + abstract incrementTotal(increment: number): void; - abstract incrementProgress(): Promise; + abstract incrementProgress(): void; - abstract incrementDone(message?: string): Promise; + abstract incrementDone(message?: string): void; - abstract update(current: number, message?: string): Promise; + abstract update(current: number, message?: string): void; - abstract done(finishedMessage?: string): Promise; + abstract done(finishedMessage?: string): void; /** * Call the `done()` method with a completion message that indicates how many items were * processed. */ - async doneItems(count: number, noun: string, verb: string): Promise { + doneItems(count: number, noun: string, verb: string): void { let pluralSuffix = 's'; if (noun.toLowerCase().endsWith('ch') || noun.toLowerCase().endsWith('s') @@ -69,10 +71,10 @@ export default abstract class ProgressBar { pluralSuffix = 'es'; } - return this.done(`${count.toLocaleString()} ${noun.trim()}${count !== 1 ? pluralSuffix : ''} ${verb}`); + this.done(`${count.toLocaleString()} ${noun.trim()}${count !== 1 ? 
pluralSuffix : ''} ${verb}`); } - abstract withLoggerPrefix(prefix: string): ProgressBar; + abstract setLoggerPrefix(prefix: string): void; abstract log(logLevel: LogLevel, message: string): void; @@ -120,7 +122,7 @@ export default abstract class ProgressBar { return this.log(LogLevel.ERROR, message); } - abstract freeze(): Promise; + abstract freeze(): void; abstract delete(): void; } diff --git a/src/console/progressBarCli.ts b/src/console/progressBarCli.ts index 02b68229f..441ec4c7e 100644 --- a/src/console/progressBarCli.ts +++ b/src/console/progressBarCli.ts @@ -5,6 +5,7 @@ import cliProgress, { MultiBar } from 'cli-progress'; import wrapAnsi from 'wrap-ansi'; import ConsolePoly from '../polyfill/consolePoly.js'; +import TimePoly from '../polyfill/timePoly.js'; import Timer from '../timer.js'; import Logger from './logger.js'; import LogLevel from './logLevel.js'; @@ -24,11 +25,11 @@ export default class ProgressBarCLI extends ProgressBar { private static progressBars: ProgressBarCLI[] = []; - private static lastRedraw: [number, number] = [0, 0]; + private static lastRedraw: number = 0; private static logQueue: string[] = []; - private readonly logger: Logger; + private logger: Logger; private readonly payload: ProgressBarPayload; @@ -36,7 +37,7 @@ export default class ProgressBarCLI extends ProgressBar { private waitingMessageTimeout?: Timer; - private readonly waitingMessages: Set = new Set(); + private readonly waitingMessages: Map = new Map(); private constructor( logger: Logger, @@ -55,21 +56,24 @@ export default class ProgressBarCLI extends ProgressBar { /** * Create a new {@link ProgressBarCLI}, and initialize the {@link MultiBar} if it hasn't been yet. */ - static async new( + static new( logger: Logger, name: string, symbol: string, initialTotal = 0, - ): Promise { + ): ProgressBarCLI { if (!ProgressBarCLI.multiBar) { ProgressBarCLI.multiBar = new cliProgress.MultiBar({ stream: logger.getLogLevel() < LogLevel.NEVER ? 
logger.getStream() : new PassThrough(), - barsize: 25, + barsize: 20, fps: 1 / 60, // limit the automatic redraws forceRedraw: true, emptyOnZero: true, hideCursor: true, }, cliProgress.Presets.shades_grey); + process.on('exit', () => { + this.multiBar?.stop(); + }); } const initialPayload: ProgressBarPayload = { @@ -89,17 +93,17 @@ export default class ProgressBarCLI extends ProgressBar { initialPayload, ); const progressBarCLI = new ProgressBarCLI(logger, initialPayload, singleBarFormatted); - await progressBarCLI.render(true); + progressBarCLI.render(true); return progressBarCLI; } /** * Stop the {@link MultiBar} (and therefore everyProgressBar). */ - static async stop(): Promise { + static stop(): void { // Freeze (and delete) any lingering progress bars const progressBarsCopy = ProgressBarCLI.progressBars.slice(); - await Promise.all(progressBarsCopy.map(async (progressBar) => progressBar.freeze())); + progressBarsCopy.forEach((progressBar) => progressBar.freeze()); // Clear the last deleted, non-frozen progress bar ProgressBarCLI.multiBar?.log(' '); @@ -117,58 +121,65 @@ export default class ProgressBarCLI extends ProgressBar { * cli-progress clears previous output. 
* @see https://github.com/npkgz/cli-progress/issues/79 */ - async render(force = false): Promise { + render(force = false): void { this.singleBarFormatted?.getSingleBar().update(this.payload); - if (!force) { - // Limit the frequency of redrawing - const [elapsedSec, elapsedNano] = process.hrtime(ProgressBarCLI.lastRedraw); - const elapsedMs = (elapsedSec * 1_000_000_000 + elapsedNano) / 1_000_000; - if (elapsedMs < (1000 / ProgressBarCLI.FPS)) { - return; + const callback = (): void => { + // Dequeue all log messages + if (ProgressBarCLI.multiBar && ProgressBarCLI.logQueue.length > 0) { + const consoleWidth = ConsolePoly.consoleWidth(); + const logMessage = ProgressBarCLI.logQueue + // Wrapping is broken: https://github.com/npkgz/cli-progress/issues/142 + .map((msg) => wrapAnsi(msg, consoleWidth, { trim: false }) + // ...and if we manually wrap lines, we also need to deal with overwriting existing + // progress bar output. + .split('\n') + // TODO(cemmer): this appears to only overwrite the last line, not any others? + .join(`\n${this.logger.isTTY() ? '\x1b[K' : ''}`)) + .join('\n'); + ProgressBarCLI.multiBar.log(`${logMessage}\n`); + ProgressBarCLI.logQueue = []; } + + ProgressBarCLI.multiBar?.update(); + ProgressBarCLI.lastRedraw = TimePoly.hrtimeMillis(); + ProgressBarCLI.RENDER_MUTEX.cancel(); // cancel all waiting locks, we just redrew + }; + + if (force) { + callback(); + return; } - try { - await ProgressBarCLI.RENDER_MUTEX.runExclusive(() => { - // Dequeue all log messages - if (ProgressBarCLI.multiBar && ProgressBarCLI.logQueue.length > 0) { - const consoleWidth = ConsolePoly.consoleWidth(); - const logMessage = ProgressBarCLI.logQueue - // Wrapping is broken: https://github.com/npkgz/cli-progress/issues/142 - .map((msg) => wrapAnsi(msg, consoleWidth, { trim: false }) - // ...and if we manually wrap lines, we also need to deal with overwriting existing - // progress bar output. - .split('\n') - .join(`\n${this.logger.isTTY() ? 
'\x1b[K' : ''}`)) - .join('\n'); - ProgressBarCLI.multiBar.log(`${logMessage}\n`); - ProgressBarCLI.logQueue = []; - } + // Limit the frequency of redrawing + const elapsedMs = TimePoly.hrtimeMillis(ProgressBarCLI.lastRedraw); + if (elapsedMs < (1000 / ProgressBarCLI.FPS)) { + return; + } - ProgressBarCLI.multiBar?.update(); - ProgressBarCLI.lastRedraw = process.hrtime(); - ProgressBarCLI.RENDER_MUTEX.cancel(); // cancel all waiting locks, we just redrew - }); - } catch (error) { - if (error !== E_CANCELED) { - throw error; + setImmediate(async () => { + try { + await ProgressBarCLI.RENDER_MUTEX.runExclusive(callback); + } catch (error) { + if (error !== E_CANCELED) { + throw error; + } } - } + }); } /** * Reset the {@link ProgressBar}'s progress to zero and change its total. */ - async reset(total: number): Promise { + reset(total: number): void { this.singleBarFormatted?.getSingleBar().setTotal(total); this.singleBarFormatted?.getSingleBar().update(0); this.payload.inProgress = 0; this.payload.waitingMessage = undefined; - return this.render(true); + this.render(true); } - private async logPayload(): Promise { + private logPayload(): void { const name = this.payload.name ?? ''; const finishedMessageWrapped = this.payload.finishedMessage ?.split('\n') @@ -184,17 +195,23 @@ export default class ProgressBarCLI extends ProgressBar { LogLevel.ALWAYS, `${name}${finishedMessageWrapped ? ` ... 
${finishedMessageWrapped}` : ''}`, ); - await this.render(true); + this.render(true); } - async setName(name: string): Promise { + setName(name: string): void { + if (this.payload.name === name) { + return; + } this.payload.name = name; - return this.render(true); + this.render(true); } - async setSymbol(symbol: string): Promise { + setSymbol(symbol: string): void { + if (this.payload.symbol === symbol) { + return; + } this.payload.symbol = symbol; - return this.render(true); + this.render(true); } /** @@ -205,9 +222,24 @@ export default class ProgressBarCLI extends ProgressBar { if (!this.singleBarFormatted) { return; } + this.waitingMessages.set(waitingMessage, TimePoly.hrtimeMillis()); + + if (!this.waitingMessageTimeout) { + this.waitingMessageTimeout = Timer.setInterval(() => { + const currentMillis = TimePoly.hrtimeMillis(); + const newWaitingMessagePair = [...this.waitingMessages] + .find(([, ms]) => currentMillis - ms >= 5000); - this.waitingMessages.add(waitingMessage); - this.setWaitingMessageTimeout(); + const newWaitingMessage = newWaitingMessagePair !== undefined + ? newWaitingMessagePair[0] + : undefined; + + if (newWaitingMessage !== this.payload.waitingMessage) { + this.payload.waitingMessage = newWaitingMessage; + this.render(true); + } + }, 1000 / ProgressBarCLI.FPS); + } } /** @@ -217,32 +249,13 @@ export default class ProgressBarCLI extends ProgressBar { if (!this.singleBarFormatted) { return; } - this.waitingMessages.delete(waitingMessage); - if (this.payload.waitingMessage) { - // Render immediately if the output could change - this.setWaitingMessageTimeout(0); - } - } - - private setWaitingMessageTimeout(timeout = 10_000): void { - this.waitingMessageTimeout?.cancel(); - - this.waitingMessageTimeout = Timer.setTimeout(async () => { - const total = this.singleBarFormatted?.getSingleBar().getTotal() ?? 
0; - if (total <= 1) { - return; - } - - [this.payload.waitingMessage] = this.waitingMessages; - await this.render(true); - }, timeout); } /** * Increment the total by some amount. */ - async incrementTotal(increment: number): Promise { + incrementTotal(increment: number): void { if (!this.singleBarFormatted) { return; } @@ -250,39 +263,39 @@ export default class ProgressBarCLI extends ProgressBar { this.singleBarFormatted.getSingleBar().setTotal( this.singleBarFormatted.getSingleBar().getTotal() + increment, ); - await this.render(); + this.render(); } /** * Increment the in-progress count by one. */ - async incrementProgress(): Promise { + incrementProgress(): void { this.payload.inProgress = Math.max(this.payload.inProgress ?? 0, 0) + 1; - return this.render(); + this.render(); } /** * Decrement the in-progress count by one, and increment the completed count by one. */ - async incrementDone(): Promise { + incrementDone(): void { this.payload.inProgress = Math.max((this.payload.inProgress ?? 0) - 1, 0); this.singleBarFormatted?.getSingleBar().increment(); - return this.render(); + this.render(); } /** * Set the completed count. */ - async update(current: number): Promise { + update(current: number): void { this.singleBarFormatted?.getSingleBar().update(current); - return this.render(); + this.render(); } /** * Set the completed count to the total, and render any completion message. */ - async done(finishedMessage?: string): Promise { - await this.setSymbol(ProgressBarSymbol.DONE); + done(finishedMessage?: string): void { + this.setSymbol(ProgressBarSymbol.DONE); const total = this.singleBarFormatted?.getSingleBar().getTotal() ?? 0; if (total > 0) { @@ -296,18 +309,14 @@ export default class ProgressBarCLI extends ProgressBar { this.payload.finishedMessage = finishedMessage; } - await this.render(true); + this.render(true); } /** * Return a copy of this {@link ProgressBar} with a new string prefix. 
*/ - withLoggerPrefix(prefix: string): ProgressBar { - return new ProgressBarCLI( - this.logger.withLoggerPrefix(prefix), - this.payload, - this.singleBarFormatted, - ); + setLoggerPrefix(prefix: string): void { + this.logger = this.logger.withLoggerPrefix(prefix); } /** @@ -335,13 +344,13 @@ export default class ProgressBarCLI extends ProgressBar { * at once. * @see https://github.com/npkgz/cli-progress/issues/59 */ - async freeze(): Promise { + freeze(): void { if (!this.singleBarFormatted) { - await this.logPayload(); + this.logPayload(); return; } - await this.render(true); + this.render(true); ProgressBarCLI.multiBar?.log(`${this.singleBarFormatted?.getLastOutput()}\n`); this.delete(); } diff --git a/src/console/singleBarFormatted.ts b/src/console/singleBarFormatted.ts index f2bfdeb6f..cf94b62c6 100644 --- a/src/console/singleBarFormatted.ts +++ b/src/console/singleBarFormatted.ts @@ -5,13 +5,15 @@ import { import { linearRegression, linearRegressionLine } from 'simple-statistics'; import stripAnsi from 'strip-ansi'; +import ConsolePoly from '../polyfill/consolePoly.js'; +import TimePoly from '../polyfill/timePoly.js'; import ProgressBarPayload from './progressBarPayload.js'; /** * A wrapper class for a cli-progress {@link SingleBar} that formats the output. 
*/ export default class SingleBarFormatted { - public static readonly MAX_NAME_LENGTH = 30; + public static readonly MAX_NAME_LENGTH = 35; public static readonly BAR_COMPLETE_CHAR = '\u2588'; @@ -27,7 +29,7 @@ export default class SingleBarFormatted { private valueTimeBuffer: number[][] = []; - private lastEtaTime: [number, number] = [0, 0]; + private lastEtaTime: number = 0; private lastEtaValue = 'infinity'; @@ -35,18 +37,26 @@ export default class SingleBarFormatted { this.multiBar = multiBar; this.singleBar = this.multiBar.create(initialTotal, 0, initialPayload, { format: (options, params, payload: ProgressBarPayload): string => { - const symbolAndName = SingleBarFormatted.getSymbolAndName(payload); + const symbolAndName = `${SingleBarFormatted.getSymbolAndName(payload)} | `; + const progressWrapped = this.getProgress(options, params, payload) .split('\n') .map((line, idx) => { + // Wrapping is broken: https://github.com/npkgz/cli-progress/issues/142 + let lineTrimmed = line; + const maxLineLength = ConsolePoly.consoleWidth() - stripAnsi(symbolAndName).length - 2; + if (line.length > maxLineLength) { + lineTrimmed = `${line.slice(0, maxLineLength - 3)}...`; + } + if (idx === 0) { - return line; + return lineTrimmed; } - return ' '.repeat(stripAnsi(symbolAndName).length + 3) + line; + return ' '.repeat(stripAnsi(symbolAndName).length) + lineTrimmed; }) .join('\n\x1b[K'); - this.lastOutput = `${symbolAndName} | ${progressWrapped}`.trim(); + this.lastOutput = `${symbolAndName}${progressWrapped}`.trim(); return this.lastOutput // cli-progress doesn't handle multi-line progress bars, collapse to one line. The multi- // line message will get logged correctly when the progress bar is frozen & logged. 
@@ -156,12 +166,11 @@ export default class SingleBarFormatted { private getEtaFormatted(etaSeconds: number): string { // Rate limit how often the ETA can change // Update only every 5s if the ETA is >60s - const [elapsedSec, elapsedNano] = process.hrtime(this.lastEtaTime); - const elapsedMs = (elapsedSec * 1_000_000_000 + elapsedNano) / 1_000_000; + const elapsedMs = TimePoly.hrtimeMillis(this.lastEtaTime); if (etaSeconds > 60 && elapsedMs < 5000) { return this.lastEtaValue; } - this.lastEtaTime = process.hrtime(); + this.lastEtaTime = TimePoly.hrtimeMillis(); if (etaSeconds < 0) { this.lastEtaValue = 'infinity'; diff --git a/src/driveSemaphore.ts b/src/driveSemaphore.ts index a9377d72b..05aba15ba 100644 --- a/src/driveSemaphore.ts +++ b/src/driveSemaphore.ts @@ -3,6 +3,7 @@ import path from 'node:path'; import async, { AsyncResultCallback } from 'async'; import { Mutex, Semaphore } from 'async-mutex'; +import ElasticSemaphore from './elasticSemaphore.js'; import Defaults from './globals/defaults.js'; import FsPoly from './polyfill/fsPoly.js'; import File from './types/files/file.js'; @@ -12,36 +13,109 @@ import File from './types/files/file.js'; * once per hard drive. */ export default class DriveSemaphore { - private readonly keySemaphores = new Map(); + private readonly driveSemaphores = new Map(); - private readonly keySemaphoresMutex = new Mutex(); - - private readonly threads: number; + private readonly driveSemaphoresMutex = new Mutex(); private readonly threadsSemaphore: Semaphore; - constructor(threads = 1) { - this.threads = threads; + constructor(threads: number) { this.threadsSemaphore = new Semaphore(threads); } + getValue(): number { + return this.threadsSemaphore.getValue(); + } + + setValue(threads: number): void { + this.threadsSemaphore.setValue(threads); + } + + /** + * Run a {@link runnable} exclusively for the given {@link file}. 
+ */ + async runExclusive( + file: File | string, + runnable: () => V | Promise, + ): Promise { + const filePathDisk = DriveSemaphore.getDiskForFile(file); + const driveSemaphore = await this.driveSemaphoresMutex.runExclusive(() => { + if (!this.driveSemaphores.has(filePathDisk)) { + // WARN(cemmer): there is an undocumented semaphore max value that can be used, the full + // 4,700,372,992 bytes of a DVD+R will cause runExclusive() to never run or return. + let maxKilobytes = Defaults.MAX_READ_WRITE_CONCURRENT_KILOBYTES; + + if (FsPoly.isSamba(filePathDisk)) { + // Forcefully limit the number of files to be processed concurrently from a single + // Samba network share + maxKilobytes = 1; + } + + this.driveSemaphores.set(filePathDisk, new ElasticSemaphore(maxKilobytes)); + } + + return this.driveSemaphores.get(filePathDisk) as ElasticSemaphore; + }); + + const fileSizeKilobytes = (file instanceof File && file.getSize() > 0 + ? file.getSize() + : await FsPoly.size(file instanceof File ? file.getFilePath() : file) + ) / 1024; + + // First, limit the number of threads per drive, which will better balance the processing of + // files on different drives vs. processing files sequentially + return driveSemaphore.runExclusive( + // Second, limit the overall number of threads + async () => this.threadsSemaphore.runExclusive( + async () => runnable(), + ), + fileSizeKilobytes, + ); + } + /** * Run some {@link runnable} for every value in {@link files}. */ async map( files: K[], - runnable: (file: K) => (V | Promise), + runnable: (file: K) => V | Promise, ): Promise { - const disks = FsPoly.disksSync(); + // Sort the files, then "stripe" them by their disk path for fair processing among disks + const disksToFiles = files + // Remember the original ordering of the files by its index + .map((file, idx) => ([file, idx] satisfies [K, number])) + .sort(([a], [b]) => { + const aPath = a instanceof File ? a.getFilePath() : a.toString(); + const bPath = b instanceof File ? 
b.getFilePath() : b.toString(); + return aPath.localeCompare(bPath); + }) + .reduce((map, [file, idx]) => { + const key = DriveSemaphore.getDiskForFile(file); + if (!map.has(key)) { + map.set(key, [[file, idx]]); + } else { + map.get(key)?.push([file, idx]); + } + return map; + }, new Map()); + const maxFilesOnAnyDisk = [...disksToFiles.values()] + .reduce((max, filesForDisk) => Math.max(max, filesForDisk.length), 0); + let filesStriped: [K, number][] = []; + const chunkSize = 5; + for (let i = 0; i < maxFilesOnAnyDisk; i += chunkSize) { + const batch = [...disksToFiles.values()] + .flatMap((filesForDisk) => filesForDisk.splice(0, chunkSize)); + filesStriped = [...filesStriped, ...batch]; + } // Limit the number of ongoing threads to something reasonable - return async.mapLimit( - files, + const results = await async.mapLimit( + filesStriped, Defaults.MAX_FS_THREADS, - async (file, callback: AsyncResultCallback) => { + async ([file, idx], callback: AsyncResultCallback<[V, number], Error>) => { try { - const val = await this.processFile(file, runnable, disks); - callback(undefined, val); + const val = await this.runExclusive(file, async () => runnable(file)); + callback(undefined, [val, idx]); } catch (error) { if (error instanceof Error) { callback(error); @@ -53,48 +127,29 @@ export default class DriveSemaphore { } }, ); + + // Put the values back in order + return results + .sort(([, aIdx], [, bIdx]) => aIdx - bIdx) + .map(([result]) => result); } - private async processFile( - file: K, - runnable: (file: K) => (V | Promise), - disks: string[], - ): Promise { + private static getDiskForFile(file: File | string): string { const filePath = file instanceof File ? file.getFilePath() : file as string; const filePathNormalized = filePath.replace(/[\\/]/g, path.sep); - const filePathResolved = path.resolve(filePathNormalized); // Try to get the path of the drive this file is on - let filePathDisk = disks.find((disk) => filePathResolved.startsWith(disk)) ?? 
''; - - if (!filePathDisk) { - // If a drive couldn't be found, try to parse a samba server name - const sambaMatches = filePathNormalized.match(/^([\\/]{2}[^\\/]+)/); - if (sambaMatches !== null) { - [, filePathDisk] = sambaMatches; - } + const filePathDisk = FsPoly.diskResolved(filePathNormalized); + if (filePathDisk !== undefined) { + return filePathDisk; } - const keySemaphore = await this.keySemaphoresMutex.runExclusive(() => { - if (!this.keySemaphores.has(filePathDisk)) { - let { threads } = this; - if (FsPoly.isSamba(filePathDisk)) { - // Forcefully limit the number of files to be processed concurrently from a single - // Samba network share - threads = 1; - } - this.keySemaphores.set(filePathDisk, new Semaphore(threads)); - } - return this.keySemaphores.get(filePathDisk) as Semaphore; - }); + // If a drive couldn't be found, try to parse a samba server name + const sambaMatches = filePathNormalized.match(/^([\\/]{2}[^\\/]+)/); + if (sambaMatches !== null) { + return sambaMatches[1]; + } - // First, limit the number of threads per drive, which will better balance the processing of - // files on different drives vs. processing files sequentially - return keySemaphore.runExclusive( - // Second, limit the overall number of threads - async () => this.threadsSemaphore.runExclusive( - async () => runnable(file), - ), - ); + return ''; } } diff --git a/src/elasticSemaphore.ts b/src/elasticSemaphore.ts index 36250933e..95ebf55de 100644 --- a/src/elasticSemaphore.ts +++ b/src/elasticSemaphore.ts @@ -1,49 +1,35 @@ -import { Mutex, Semaphore } from 'async-mutex'; +import { Semaphore } from 'async-mutex'; /** * Wrapper for an `async-mutex` {@link Semaphore} that can have its total increased if a weight * exceeds the maximum. 
*/ export default class ElasticSemaphore { - private readonly valueMutex = new Mutex(); - - private value: number; + private readonly semaphoreValue: number; private readonly semaphore: Semaphore; constructor(value: number) { - this.value = Math.ceil(value); - this.semaphore = new Semaphore(this.value); + this.semaphoreValue = Math.ceil(value); + this.semaphore = new Semaphore(this.semaphoreValue); } /** * Run some {@link callback} with a required {@link weight}. */ - async runExclusive(callback: (value: number) => Promise | T, weight: number): Promise { + async runExclusive( + callback: (value: number) => Promise | T, + weight: number, + ): Promise { const weightNormalized = Math.max(1, Math.ceil(weight)); - // If the weight of this call isn't even 1% of the max value then don't incur the overhead - // of a semaphore - if ((weightNormalized / this.value) * 100 < 1) { - return callback(this.semaphore.getValue()); - } - - // If the weight of this call is larger than the max value then we need to increase the max - if (weightNormalized > this.value) { - await this.valueMutex.runExclusive(() => { - const increase = weightNormalized - this.value; - if (increase <= 0) { - // A competing runnable already increased this semaphore's value - return; - } - this.semaphore.setValue(this.semaphore.getValue() + increase); - this.value += increase; - }); - } - // NOTE(cemmer): this semaphore can take a measurable amount of time to actually call the // callback. This is particularly noticeable when using single threads (e.g. via Async.js). // Try to only use semaphores to traffic cop multiple concurrent threads. - return this.semaphore.runExclusive(callback, weightNormalized); + return this.semaphore.runExclusive( + callback, + // If the weight of this call is larger than the max value then just use the max value + weightNormalized > this.semaphoreValue ? 
this.semaphoreValue : weightNormalized, + ); } } diff --git a/src/globals/temp.ts b/src/globals/temp.ts index 59f543252..9b8ab88d8 100644 --- a/src/globals/temp.ts +++ b/src/globals/temp.ts @@ -23,9 +23,8 @@ export default class Temp { } } -process.once('beforeExit', async () => { - // WARN: Jest won't call this: https://github.com/jestjs/jest/issues/10927 - await FsPoly.rm(Temp.getTempDir(), { +process.once('exit', () => { + FsPoly.rmSync(Temp.getTempDir(), { force: true, recursive: true, }); diff --git a/src/igir.ts b/src/igir.ts index a122ebc7a..1b8257669 100644 --- a/src/igir.ts +++ b/src/igir.ts @@ -88,33 +88,44 @@ export default class Igir { this.logger.trace('Windows has symlink permissions'); } + if (this.options.shouldLink() && !this.options.getSymlink()) { + const outputDirRoot = this.options.getOutputDirRoot(); + if (!await FsPoly.canHardlink(outputDirRoot)) { + const outputDisk = FsPoly.diskResolved(outputDirRoot); + throw new ExpectedError(`${outputDisk} does not support hard-linking`); + } + } + // File cache options + const fileCache = new FileCache(); if (this.options.getDisableCache()) { this.logger.trace('disabling the file cache'); - FileCache.disable(); + fileCache.disable(); } else { const cachePath = await this.getCachePath(); if (cachePath !== undefined && process.env.NODE_ENV !== 'test') { this.logger.trace(`loading the file cache at '${cachePath}'`); - await FileCache.loadFile(cachePath); + await fileCache.loadFile(cachePath); } else { this.logger.trace('not using a file for the file cache'); } } + const fileFactory = new FileFactory(fileCache); // Scan and process input files - let dats = await this.processDATScanner(); + let dats = await this.processDATScanner(fileFactory); const indexedRoms = await this.processROMScanner( + fileFactory, this.determineScanningBitmask(dats), this.determineScanningChecksumArchives(dats), ); const roms = indexedRoms.getFiles(); - const patches = await this.processPatchScanner(); + const patches = await 
this.processPatchScanner(fileFactory); // Set up progress bar and input for DAT processing - const datProcessProgressBar = await this.logger.addProgressBar(chalk.underline('Processing DATs'), ProgressBarSymbol.NONE, dats.length); + const datProcessProgressBar = this.logger.addProgressBar(chalk.underline('Processing DATs'), ProgressBarSymbol.NONE, dats.length); if (dats.length === 0) { - dats = new DATGameInferrer(this.options, datProcessProgressBar).infer(roms); + dats = await new DATGameInferrer(this.options, datProcessProgressBar).infer(roms); } const datsToWrittenFiles = new Map(); @@ -125,22 +136,23 @@ export default class Igir { // Process every DAT datProcessProgressBar.logTrace(`processing ${dats.length.toLocaleString()} DAT${dats.length !== 1 ? 's' : ''}`); await async.eachLimit(dats, this.options.getDatThreads(), async (dat, callback) => { - await datProcessProgressBar.incrementProgress(); + datProcessProgressBar.incrementProgress(); - const progressBar = await this.logger.addProgressBar( + const progressBar = this.logger.addProgressBar( dat.getNameShort(), ProgressBarSymbol.WAITING, dat.getParents().length, ); - const datWithParents = await new DATParentInferrer(this.options, progressBar).infer(dat); - const mergedSplitDat = await new DATMergerSplitter(this.options, progressBar) + const datWithParents = new DATParentInferrer(this.options, progressBar).infer(dat); + const mergedSplitDat = new DATMergerSplitter(this.options, progressBar) .merge(datWithParents); - const filteredDat = await new DATFilter(this.options, progressBar).filter(mergedSplitDat); + const filteredDat = new DATFilter(this.options, progressBar).filter(mergedSplitDat); // Generate and filter ROM candidates const parentsToCandidates = await this.generateCandidates( progressBar, + fileFactory, filteredDat, indexedRoms, patches, @@ -180,7 +192,7 @@ export default class Igir { const datStatus = new StatusGenerator(this.options, progressBar) .generate(filteredDat, parentsToCandidates); 
datsStatuses.push(datStatus); - await progressBar.done([ + progressBar.done([ datStatus.toConsole(this.options), dir2DatPath ? `dir2dat: ${dir2DatPath}` : undefined, fixdatPath ? `Fixdat: ${fixdatPath}` : undefined, @@ -190,17 +202,17 @@ export default class Igir { const totalReleaseCandidates = [...parentsToCandidates.values()] .reduce((sum, rcs) => sum + rcs.length, 0); if (totalReleaseCandidates > 0) { - await progressBar.freeze(); + progressBar.freeze(); } else { progressBar.delete(); } - await datProcessProgressBar.incrementDone(); + datProcessProgressBar.incrementDone(); callback(); }); datProcessProgressBar.logTrace(`done processing ${dats.length.toLocaleString()} DAT${dats.length !== 1 ? 's' : ''}`); - await datProcessProgressBar.doneItems(dats.length, 'DAT', 'processed'); + datProcessProgressBar.doneItems(dats.length, 'DAT', 'processed'); datProcessProgressBar.delete(); // Delete moved ROMs @@ -212,19 +224,23 @@ export default class Igir { // Generate the report await this.processReportGenerator(roms, cleanedOutputFiles, datsStatuses); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); Timer.cancelAll(); } private async getCachePath(): Promise { - const defaultFileName = `${Package.NAME}.cache`; + const defaultFileName = process.versions.bun + // As of v1.1.26, Bun uses a different serializer than V8, making cache files incompatible + // @see https://bun.sh/docs/runtime/nodejs-apis + ? 
`${Package.NAME}.bun.cache` + : `${Package.NAME}.cache`; - // Try to use the provided path + // First, try to use the provided path let cachePath = this.options.getCachePath(); if (cachePath !== undefined && await FsPoly.isDirectory(cachePath)) { cachePath = path.join(cachePath, defaultFileName); - this.logger.warn(`A directory was provided for cache path instead of a file, using '${cachePath}' instead`); + this.logger.warn(`A directory was provided for the cache path instead of a file, using '${cachePath}' instead`); } if (cachePath !== undefined) { if (await FsPoly.isWritable(cachePath)) { @@ -233,22 +249,36 @@ export default class Igir { this.logger.warn('Provided cache path isn\'t writable, using the default path'); } - // Otherwise, use a default path - return [ + const cachePathCandidates = [ path.join(path.resolve(Package.DIRECTORY), defaultFileName), path.join(os.homedir(), defaultFileName), path.join(process.cwd(), defaultFileName), ] .filter((filePath) => filePath && !filePath.startsWith(os.tmpdir())) - .find(async (filePath) => { - if (await FsPoly.exists(filePath)) { - return true; - } - return FsPoly.isWritable(filePath); - }); + .reduce(ArrayPoly.reduceUnique(), []); + + // Next, try to use an already existing path + const exists = await Promise.all( + cachePathCandidates.map(async (pathCandidate) => FsPoly.exists(pathCandidate)), + ); + const existsCachePath = cachePathCandidates.find((_, idx) => exists[idx]); + if (existsCachePath !== undefined) { + return existsCachePath; + } + + // Next, try to find a writable path + const writable = await Promise.all( + cachePathCandidates.map(async (pathCandidate) => FsPoly.isWritable(pathCandidate)), + ); + const writableCachePath = cachePathCandidates.find((_, idx) => writable[idx]); + if (writableCachePath !== undefined) { + return writableCachePath; + } + + return undefined; } - private async processDATScanner(): Promise { + private async processDATScanner(fileFactory: FileFactory): Promise { if 
(this.options.shouldDir2Dat()) { return []; } @@ -257,8 +287,8 @@ export default class Igir { return []; } - const progressBar = await this.logger.addProgressBar('Scanning for DATs'); - let dats = await new DATScanner(this.options, progressBar).scan(); + const progressBar = this.logger.addProgressBar('Scanning for DATs'); + let dats = await new DATScanner(this.options, progressBar, fileFactory).scan(); if (dats.length === 0) { throw new ExpectedError('No valid DAT files found!'); } @@ -275,17 +305,24 @@ export default class Igir { } if (this.options.getDatCombine()) { - await progressBar.reset(1); + progressBar.reset(1); dats = [new DATCombiner(progressBar).combine(dats)]; } - await progressBar.doneItems(dats.length, 'DAT', 'found'); - await progressBar.freeze(); + progressBar.doneItems(dats.length, 'DAT', this.options.getDatCombine() ? 'combined' : 'found'); + progressBar.freeze(); return dats; } private determineScanningBitmask(dats: DAT[]): number { - const minimumChecksum = this.options.getInputMinChecksum() ?? ChecksumBitmask.CRC32; + const minimumChecksum = this.options.getInputChecksumMin() ?? ChecksumBitmask.NONE; + const maximumChecksum = this.options.getInputChecksumMax() + ?? Object.keys(ChecksumBitmask) + .filter((bitmask): bitmask is keyof typeof ChecksumBitmask => Number.isNaN(Number(bitmask))) + .map((bitmask) => ChecksumBitmask[bitmask]) + .at(-1) + ?? 
minimumChecksum; + let matchChecksum = minimumChecksum; if (this.options.getPatchFileCount() > 0) { @@ -307,20 +344,43 @@ export default class Igir { } dats.forEach((dat) => { - const datMinimumBitmask = dat.getRequiredChecksumBitmask(); + const datMinimumRomBitmask = dat.getRequiredRomChecksumBitmask(); + Object.keys(ChecksumBitmask) + .filter((bitmask): bitmask is keyof typeof ChecksumBitmask => Number.isNaN(Number(bitmask))) + // Has not been enabled yet + .filter((bitmask) => ChecksumBitmask[bitmask] > minimumChecksum + && ChecksumBitmask[bitmask] <= maximumChecksum) + .filter((bitmask) => !(matchChecksum & ChecksumBitmask[bitmask])) + // Should be enabled for this DAT + .filter((bitmask) => datMinimumRomBitmask & ChecksumBitmask[bitmask]) + .forEach((bitmask) => { + matchChecksum |= ChecksumBitmask[bitmask]; + this.logger.trace(`${dat.getNameShort()}: needs ${bitmask} file checksums for ROMs, enabling`); + }); + + if (this.options.getExcludeDisks()) { + return; + } + const datMinimumDiskBitmask = dat.getRequiredDiskChecksumBitmask(); Object.keys(ChecksumBitmask) .filter((bitmask): bitmask is keyof typeof ChecksumBitmask => Number.isNaN(Number(bitmask))) // Has not been enabled yet - .filter((bitmask) => ChecksumBitmask[bitmask] > minimumChecksum) + .filter((bitmask) => ChecksumBitmask[bitmask] > minimumChecksum + && ChecksumBitmask[bitmask] <= maximumChecksum) .filter((bitmask) => !(matchChecksum & ChecksumBitmask[bitmask])) // Should be enabled for this DAT - .filter((bitmask) => datMinimumBitmask & ChecksumBitmask[bitmask]) + .filter((bitmask) => datMinimumDiskBitmask & ChecksumBitmask[bitmask]) .forEach((bitmask) => { matchChecksum |= ChecksumBitmask[bitmask]; - this.logger.trace(`${dat.getNameShort()}: needs ${bitmask} file checksums, enabling`); + this.logger.trace(`${dat.getNameShort()}: needs ${bitmask} file checksums for disks, enabling`); }); }); + if (matchChecksum === ChecksumBitmask.NONE) { + matchChecksum |= ChecksumBitmask.CRC32; + 
this.logger.trace('at least one checksum algorithm is required, enabling CRC32 file checksums'); + } + return matchChecksum; } @@ -344,44 +404,48 @@ export default class Igir { } private async processROMScanner( + fileFactory: FileFactory, checksumBitmask: number, checksumArchives: boolean, ): Promise { const romScannerProgressBarName = 'Scanning for ROMs'; - const romProgressBar = await this.logger.addProgressBar(romScannerProgressBarName); + const romProgressBar = this.logger.addProgressBar(romScannerProgressBarName); - const rawRomFiles = await new ROMScanner(this.options, romProgressBar) + const rawRomFiles = await new ROMScanner(this.options, romProgressBar, fileFactory) .scan(checksumBitmask, checksumArchives); - await romProgressBar.setName('Detecting ROM headers'); - const romFilesWithHeaders = await new ROMHeaderProcessor(this.options, romProgressBar) - .process(rawRomFiles); + romProgressBar.setName('Detecting ROM headers'); + const romFilesWithHeaders = await new ROMHeaderProcessor( + this.options, + romProgressBar, + fileFactory, + ).process(rawRomFiles); - await romProgressBar.setName('Indexing ROMs'); - const indexedRomFiles = await new ROMIndexer(this.options, romProgressBar) - .index(romFilesWithHeaders); + romProgressBar.setName('Indexing ROMs'); + const indexedRomFiles = new ROMIndexer(this.options, romProgressBar).index(romFilesWithHeaders); - await romProgressBar.setName(romScannerProgressBarName); // reset - await romProgressBar.doneItems(romFilesWithHeaders.length, 'file', 'found'); - await romProgressBar.freeze(); + romProgressBar.setName(romScannerProgressBarName); // reset + romProgressBar.doneItems(romFilesWithHeaders.length, 'file', 'found'); + romProgressBar.freeze(); return indexedRomFiles; } - private async processPatchScanner(): Promise { + private async processPatchScanner(fileFactory: FileFactory): Promise { if (!this.options.getPatchFileCount()) { return []; } - const progressBar = await this.logger.addProgressBar('Scanning for 
patches'); - const patches = await new PatchScanner(this.options, progressBar).scan(); - await progressBar.doneItems(patches.length, 'patch', 'found'); - await progressBar.freeze(); + const progressBar = this.logger.addProgressBar('Scanning for patches'); + const patches = await new PatchScanner(this.options, progressBar, fileFactory).scan(); + progressBar.doneItems(patches.length, 'patch', 'found'); + progressBar.freeze(); return patches; } private async generateCandidates( progressBar: ProgressBar, + fileFactory: FileFactory, dat: DAT, indexedRoms: IndexedFiles, patches: Patch[], @@ -392,30 +456,34 @@ export default class Igir { const patchedCandidates = await new CandidatePatchGenerator(progressBar) .generate(dat, candidates, patches); - const preferredCandidates = await new CandidatePreferer(this.options, progressBar) + const preferredCandidates = new CandidatePreferer(this.options, progressBar) .prefer(dat, patchedCandidates); const extensionCorrectedCandidates = await new CandidateExtensionCorrector( this.options, progressBar, + fileFactory, ).correct(dat, preferredCandidates); // Delay calculating checksums for {@link ArchiveFile}s until after {@link CandidatePreferer} // for efficiency - const hashedCandidates = await new CandidateArchiveFileHasher(this.options, progressBar) - .hash(dat, extensionCorrectedCandidates); + const hashedCandidates = await new CandidateArchiveFileHasher( + this.options, + progressBar, + fileFactory, + ).hash(dat, extensionCorrectedCandidates); - const postProcessedCandidates = await new CandidatePostProcessor(this.options, progressBar) + const postProcessedCandidates = new CandidatePostProcessor(this.options, progressBar) .process(dat, hashedCandidates); - const invalidCandidates = await new CandidateValidator(progressBar) + const invalidCandidates = new CandidateValidator(progressBar) .validate(dat, postProcessedCandidates); if (invalidCandidates.length > 0) { // Return zero candidates if any candidates failed to validate return 
new Map(); } - await new CandidateMergeSplitValidator(this.options, progressBar) + new CandidateMergeSplitValidator(this.options, progressBar) .validate(dat, postProcessedCandidates); return new CandidateCombiner(this.options, progressBar) @@ -457,11 +525,15 @@ export default class Igir { return; } - const progressBar = await this.logger.addProgressBar('Deleting moved files'); + const progressBar = this.logger.addProgressBar('Deleting moved files'); const deletedFilePaths = await new MovedROMDeleter(progressBar) .delete(rawRomFiles, movedRomsToDelete, datsToWrittenFiles); - await progressBar.doneItems(deletedFilePaths.length, 'moved file', 'deleted'); - await progressBar.freeze(); + progressBar.doneItems(deletedFilePaths.length, 'moved file', 'deleted'); + if (deletedFilePaths.length > 0) { + progressBar.freeze(); + } else { + progressBar.delete(); + } } private async processOutputCleaner( @@ -475,13 +547,13 @@ export default class Igir { return []; } - const progressBar = await this.logger.addProgressBar('Cleaning output directory'); + const progressBar = this.logger.addProgressBar('Cleaning output directory'); const uniqueDirsToClean = dirsToClean.reduce(ArrayPoly.reduceUnique(), []); const writtenFilesToExclude = [...datsToWrittenFiles.values()].flat(); const filesCleaned = await new DirectoryCleaner(this.options, progressBar) .clean(uniqueDirsToClean, writtenFilesToExclude); - await progressBar.doneItems(filesCleaned.length, 'file', 'recycled'); - await progressBar.freeze(); + progressBar.doneItems(filesCleaned.length, 'file', 'recycled'); + progressBar.freeze(); return filesCleaned; } @@ -494,7 +566,7 @@ export default class Igir { return; } - const reportProgressBar = await this.logger.addProgressBar('Generating report', ProgressBarSymbol.WRITING); + const reportProgressBar = this.logger.addProgressBar('Generating report', ProgressBarSymbol.WRITING); await new ReportGenerator(this.options, reportProgressBar).generate( scannedRomFiles, cleanedOutputFiles, diff 
--git a/src/keyedMutex.ts b/src/keyedMutex.ts new file mode 100644 index 000000000..6a1188449 --- /dev/null +++ b/src/keyedMutex.ts @@ -0,0 +1,58 @@ +import { Mutex } from 'async-mutex'; + +/** + * Wrapper for `async-mutex` {@link Mutex}es to run code exclusively for a key. + */ +export default class KeyedMutex { + private readonly keyMutexes = new Map(); + + private readonly keyMutexesMutex = new Mutex(); + + private keyMutexesLru: Set = new Set(); + + private readonly maxSize?: number; + + constructor(maxSize?: number) { + this.maxSize = maxSize; + } + + /** + * Run a {@link runnable} exclusively across all keys. + */ + async runExclusiveGlobally( + runnable: () => V | Promise, + ): Promise { + return this.keyMutexesMutex.runExclusive(runnable); + } + + /** + * Run a {@link runnable} exclusively for the given {@link key}. + */ + async runExclusiveForKey( + key: string, + runnable: () => V | Promise, + ): Promise { + const keyMutex = await this.runExclusiveGlobally(() => { + if (!this.keyMutexes.has(key)) { + this.keyMutexes.set(key, new Mutex()); + + // Expire least recently used keys + [...this.keyMutexesLru] + .filter((lruKey) => !this.keyMutexes.get(lruKey)?.isLocked()) + .slice(this.maxSize ?? 
Number.MAX_SAFE_INTEGER) + .forEach((lruKey) => { + this.keyMutexes.delete(lruKey); + this.keyMutexesLru.delete(lruKey); + }); + } + + // Mark this key as recently used + this.keyMutexesLru.delete(key); + this.keyMutexesLru = new Set([key, ...this.keyMutexesLru]); + + return this.keyMutexes.get(key) as Mutex; + }); + + return keyMutex.runExclusive(runnable); + } +} diff --git a/src/modules/argumentsParser.ts b/src/modules/argumentsParser.ts index cdab0f0fe..2b28a01c3 100644 --- a/src/modules/argumentsParser.ts +++ b/src/modules/argumentsParser.ts @@ -15,7 +15,7 @@ import Options, { FixExtension, GameSubdirMode, InputChecksumArchivesMode, - MergeMode, + MergeMode, PreferRevision, } from '../types/options.js'; import PatchFactory from '../types/patches/patchFactory.js'; @@ -79,7 +79,7 @@ export default class ArgumentsParser { const groupRomZip = 'zip command options:'; const groupRomLink = 'link command options:'; const groupRomHeader = 'ROM header options:'; - const groupRomSet = 'ROM set options:'; + const groupRomSet = 'ROM set options (requires DATs):'; const groupRomFiltering = 'ROM filtering options:'; const groupRomPriority = 'One game, one ROM (1G1R) options:'; const groupReport = 'report command options:'; @@ -87,11 +87,10 @@ export default class ArgumentsParser { // Add every command to a yargs object, recursively, resulting in the ability to specify // multiple commands - const commands: [string, string | boolean][] = [ + const commands = [ ['copy', 'Copy ROM files from the input to output directory'], ['move', 'Move ROM files from the input to output directory'], ['link', 'Create links in the output directory to ROM files in the input directory'], - ['symlink', false], ['extract', 'Extract ROM files in archives when copying or moving'], ['zip', 'Create zip archives of ROMs when copying or moving'], ['test', 'Test ROMs for accuracy after writing them to the output directory'], @@ -102,9 +101,9 @@ export default class ArgumentsParser { ]; const 
mutuallyExclusiveCommands = [ // Write commands - ['copy', 'move', 'link', 'symlink'], + ['copy', 'move', 'link'], // Archive manipulation commands - ['link', 'symlink', 'extract', 'zip'], + ['link', 'extract', 'zip'], // DAT writing commands ['dir2dat', 'fixdat'], ]; @@ -124,18 +123,10 @@ export default class ArgumentsParser { return !incompatibleCommands.includes(command); }) .forEach(([command, description]) => { - if (typeof description === 'string') { - yargsObj.command(command, description, (yargsSubObj) => addCommands( - yargsSubObj, - [...previousCommands, command], - )); - } else { - // A deprecation message should be printed elsewhere - yargsObj.command(command, false, (yargsSubObj) => addCommands( - yargsSubObj, - [...previousCommands, command], - )); - } + yargsObj.command(command, description, (yargsSubObj) => addCommands( + yargsSubObj, + [...previousCommands, command], + )); }); if (previousCommands.length === 0) { @@ -149,10 +140,6 @@ export default class ArgumentsParser { middlewareArgv._ = middlewareArgv._.reduce(ArrayPoly.reduceUnique(), []); }, true) .check((checkArgv) => { - if (checkArgv.help) { - return true; - } - ['extract', 'zip'].forEach((command) => { if (checkArgv._.includes(command) && ['copy', 'move'].every((write) => !checkArgv._.includes(write))) { throw new ExpectedError(`Command "${command}" also requires the commands copy or move`); @@ -160,7 +147,7 @@ export default class ArgumentsParser { }); ['test', 'clean'].forEach((command) => { - if (checkArgv._.includes(command) && ['copy', 'move', 'link', 'symlink'].every((write) => !checkArgv._.includes(write))) { + if (checkArgv._.includes(command) && ['copy', 'move', 'link'].every((write) => !checkArgv._.includes(write))) { throw new ExpectedError(`Command "${command}" requires one of the commands: copy, move, or link`); } }); @@ -192,7 +179,7 @@ export default class ArgumentsParser { requiresArg: true, }) .check((checkArgv) => { - const needInput = ['copy', 'move', 'link', 'symlink', 
'extract', 'zip', 'test', 'dir2dat', 'fixdat'].filter((command) => checkArgv._.includes(command)); + const needInput = ['copy', 'move', 'link', 'extract', 'zip', 'test', 'dir2dat', 'fixdat'].filter((command) => checkArgv._.includes(command)); if (!checkArgv.input && needInput.length > 0) { // TODO(cememr): print help message throw new ExpectedError(`Missing required argument for command${needInput.length !== 1 ? 's' : ''} ${needInput.join(', ')}: --input `); @@ -206,7 +193,22 @@ export default class ArgumentsParser { type: 'array', requiresArg: true, }) - .option('input-min-checksum', { + .option('input-checksum-quick', { + group: groupRomInput, + description: 'Only read checksums from archive headers, don\'t decompress to calculate', + type: 'boolean', + }) + .check((checkArgv) => { + // Re-implement `conflicts: 'input-checksum-min'`, which isn't possible with a default value + if (checkArgv['input-checksum-quick'] && checkArgv['input-checksum-min'] !== ChecksumBitmask[ChecksumBitmask.CRC32].toUpperCase()) { + throw new ExpectedError('Arguments input-checksum-quick and input-checksum-min are mutually exclusive'); + } + if (checkArgv['input-checksum-quick'] && checkArgv['input-checksum-max']) { + throw new ExpectedError('Arguments input-checksum-quick and input-checksum-max are mutually exclusive'); + } + return true; + }) + .option('input-checksum-min', { group: groupRomInput, description: 'The minimum checksum level to calculate and use for matching', choices: Object.keys(ChecksumBitmask) @@ -217,6 +219,28 @@ export default class ArgumentsParser { requiresArg: true, default: ChecksumBitmask[ChecksumBitmask.CRC32].toUpperCase(), }) + .option('input-checksum-max', { + group: groupRomInput, + description: 'The maximum checksum level to calculate and use for matching', + choices: Object.keys(ChecksumBitmask) + .filter((bitmask) => Number.isNaN(Number(bitmask))) + .filter((bitmask) => ChecksumBitmask[bitmask as keyof typeof ChecksumBitmask] > 0) + .map((bitmask) => 
bitmask.toUpperCase()), + coerce: ArgumentsParser.getLastValue, // don't allow string[] values + requiresArg: true, + }) + .check((checkArgv) => { + const options = Options.fromObject(checkArgv); + const inputChecksumMin = options.getInputChecksumMin(); + const inputChecksumMax = options.getInputChecksumMax(); + if (inputChecksumMin !== undefined + && inputChecksumMax !== undefined + && inputChecksumMin > inputChecksumMax + ) { + throw new ExpectedError('Invalid --input-checksum-min & --input-checksum-max, the min must be less than the max'); + } + return true; + }) .option('input-checksum-archives', { group: groupRomInput, description: 'Calculate checksums of archive files themselves, allowing them to match files in DATs', @@ -257,15 +281,6 @@ export default class ArgumentsParser { coerce: ArgumentsParser.readRegexFile, requiresArg: true, }) - .option('dat-regex', { - type: 'string', - coerce: (val) => { - this.logger.warn('the \'--dat-regex\' option is deprecated, use \'--dat-name-regex\' instead'); - return ArgumentsParser.readRegexFile(val); - }, - requiresArg: true, - hidden: true, - }) .option('dat-name-regex-exclude', { group: groupDatInput, description: 'Regular expression of DAT names to exclude from processing', @@ -273,15 +288,6 @@ export default class ArgumentsParser { coerce: ArgumentsParser.readRegexFile, requiresArg: true, }) - .option('dat-regex-exclude', { - type: 'string', - coerce: (val) => { - this.logger.warn('the \'--dat-regex-exclude\' option is deprecated, use \'--dat-name-regex-exclude\' instead'); - return ArgumentsParser.readRegexFile(val); - }, - requiresArg: true, - hidden: true, - }) .option('dat-description-regex', { group: groupDatInput, description: 'Regular expression of DAT descriptions to process', @@ -333,17 +339,6 @@ export default class ArgumentsParser { requiresArg: true, }) - .option('fixdat', { - type: 'boolean', - coerce: (val: boolean) => { - this.logger.warn('the \'--fixdat\' option is deprecated, use the \'fixdat\' 
command instead'); - return val; - }, - implies: 'dat', - deprecated: true, - hidden: true, - }) - .option('output', { group: groupRomOutputPath, alias: 'o', @@ -437,10 +432,7 @@ export default class ArgumentsParser { type: 'boolean', }) .check((checkArgv) => { - if (checkArgv.help) { - return true; - } - const needOutput = ['copy', 'move', 'link', 'symlink', 'extract', 'zip', 'clean'].filter((command) => checkArgv._.includes(command)); + const needOutput = ['copy', 'move', 'link', 'extract', 'zip', 'clean'].filter((command) => checkArgv._.includes(command)); if (!checkArgv.output && needOutput.length > 0) { // TODO(cememr): print help message throw new ExpectedError(`Missing required argument for command${needOutput.length !== 1 ? 's' : ''} ${needOutput.join(', ')}: --output `); @@ -468,9 +460,6 @@ export default class ArgumentsParser { type: 'boolean', }) .check((checkArgv) => { - if (checkArgv.help) { - return true; - } const needClean = ['clean-exclude', 'clean-backup', 'clean-dry-run'].filter((option) => checkArgv[option]); if (!checkArgv._.includes('clean') && needClean.length > 0) { // TODO(cememr): print help message @@ -482,7 +471,7 @@ export default class ArgumentsParser { .option('zip-exclude', { group: groupRomZip, alias: 'Z', - description: 'Glob pattern of files to exclude from zipping', + description: 'Glob pattern of ROM filenames to exclude from zipping', type: 'string', coerce: ArgumentsParser.getLastValue, // don't allow string[] values requiresArg: true, @@ -493,9 +482,6 @@ export default class ArgumentsParser { type: 'boolean', }) .check((checkArgv) => { - if (checkArgv.help) { - return true; - } const needZip = ['zip-exclude', 'zip-dat-name'].filter((option) => checkArgv[option]); if (!checkArgv._.includes('zip') && needZip.length > 0) { throw new ExpectedError(`Missing required command for option${needZip.length !== 1 ? 
's' : ''} ${needZip.join(', ')}: zip`); @@ -508,15 +494,6 @@ export default class ArgumentsParser { description: 'Creates symbolic links instead of hard links', type: 'boolean', }) - .middleware((middlewareArgv) => { - if (middlewareArgv._.includes('symlink')) { - this.logger.warn('the \'symlink\' command is deprecated, use \'link --symlink\' instead'); - if (middlewareArgv.symlink === undefined) { - // eslint-disable-next-line no-param-reassign - middlewareArgv.symlink = true; - } - } - }, true) .option('symlink-relative', { group: groupRomLink, description: 'Create symlinks as relative to the target path, as opposed to absolute', @@ -524,11 +501,8 @@ export default class ArgumentsParser { implies: 'symlink', }) .check((checkArgv) => { - if (checkArgv.help) { - return true; - } const needLinkCommand = ['symlink'].filter((option) => checkArgv[option]); - if (!checkArgv._.includes('link') && !checkArgv._.includes('symlink') && needLinkCommand.length > 0) { + if (!checkArgv._.includes('link') && needLinkCommand.length > 0) { throw new ExpectedError(`Missing required command for option${needLinkCommand.length !== 1 ? 
's' : ''} ${needLinkCommand.join(', ')}: link`); } return true; @@ -536,7 +510,7 @@ export default class ArgumentsParser { .option('header', { group: groupRomHeader, - description: 'Glob pattern of files to force header processing for', + description: 'Glob pattern of input filenames to force header processing for', type: 'string', coerce: ArgumentsParser.getLastValue, // don't allow string[] values requiresArg: true, @@ -567,10 +541,30 @@ export default class ArgumentsParser { requiresArg: true, default: MergeMode[MergeMode.FULLNONMERGED].toLowerCase(), }) + .check((checkArgv) => { + // Re-implement `implies: 'dat'`, which isn't possible with a default value + if (checkArgv['merge-roms'] !== MergeMode[MergeMode.FULLNONMERGED].toLowerCase() && !checkArgv.dat) { + throw new ExpectedError('Missing dependent arguments:\n merge-roms -> dat'); + } + return true; + }) + .option('exclude-disks', { + group: groupRomSet, + description: 'Exclude CHD disks in DATs from processing & writing', + type: 'boolean', + implies: 'dat', + }) + .option('allow-excess-sets', { + group: groupRomSet, + description: 'Allow writing archives that have excess files when not extracting or zipping', + type: 'boolean', + implies: 'dat', + }) .option('allow-incomplete-sets', { group: groupRomSet, description: 'Allow writing games that don\'t have all of their ROMs', type: 'boolean', + implies: 'dat', }) .option('filter-regex', { @@ -604,16 +598,6 @@ export default class ArgumentsParser { } return true; }) - .option('language-filter', { - type: 'string', - coerce: (val: string) => { - this.logger.warn('the \'--language-filter\' option is deprecated, use \'--filter-language\' instead'); - return val.split(','); - }, - requiresArg: true, - deprecated: true, - hidden: true, - }) .option('filter-region', { group: groupRomFiltering, alias: 'R', @@ -628,16 +612,6 @@ export default class ArgumentsParser { throw new ExpectedError(`Invalid --filter-region region${invalidRegions.length !== 1 ? 
's' : ''}: ${invalidRegions.join(', ')}`); } return true; - }) - .option('region-filter', { - type: 'string', - coerce: (val: string) => { - this.logger.warn('the \'--region-filter\' option is deprecated, use \'--filter-region\' instead'); - return val.split(','); - }, - requiresArg: true, - deprecated: true, - hidden: true, }); [ ['bios', 'BIOS files'], @@ -664,25 +638,23 @@ export default class ArgumentsParser { type: 'boolean', }); ([ - ['debug', 'debug ROMs', false], - ['demo', 'demo ROMs', false], - ['beta', 'beta ROMs', false], - ['sample', 'sample ROMs', false], - ['prototype', 'prototype ROMs', false], - ['test-roms', 'test ROMs', true], - ['program', 'program application ROMs', false], - ['aftermarket', 'aftermarket ROMs', false], - ['homebrew', 'homebrew ROMs', false], - ['unverified', 'unverified ROMs', false], - ['bad', 'bad ROM dumps', false], - ] satisfies [string, string, boolean][]).forEach(([key, description, hidden]) => { + ['debug', 'debug ROMs'], + ['demo', 'demo ROMs'], + ['beta', 'beta ROMs'], + ['sample', 'sample ROMs'], + ['prototype', 'prototype ROMs'], + ['program', 'program application ROMs'], + ['aftermarket', 'aftermarket ROMs'], + ['homebrew', 'homebrew ROMs'], + ['unverified', 'unverified ROMs'], + ['bad', 'bad ROM dumps'], + ]).forEach(([key, description]) => { yargsParser .option(`no-${key}`, { group: groupRomFiltering, description: `Filter out ${description}, opposite of --only-${key}`, type: 'boolean', conflicts: [`only-${key}`], - hidden, }) .option(`only-${key}`, { type: 'boolean', @@ -690,22 +662,6 @@ export default class ArgumentsParser { hidden: true, }); }); - yargsParser.middleware((middlewareArgv) => { - if (middlewareArgv['no-test-roms'] === true) { - this.logger.warn('the \'--no-test-roms\' option is deprecated, use \'--no-program\' instead'); - if (middlewareArgv.noProgram === undefined) { - // eslint-disable-next-line no-param-reassign - middlewareArgv.noProgram = true; - } - } - if (middlewareArgv['only-test-roms'] 
=== true) { - this.logger.warn('the \'--only-test-roms\' option is deprecated, use \'--only-program\' instead'); - if (middlewareArgv.onlyProgram === undefined) { - // eslint-disable-next-line no-param-reassign - middlewareArgv.onlyProgram = true; - } - } - }, true); yargsParser .option('single', { @@ -774,18 +730,14 @@ export default class ArgumentsParser { } return true; }) - .option('prefer-revision-newer', { + .option('prefer-revision', { group: groupRomPriority, - description: 'Prefer newer ROM revisions over older', - type: 'boolean', - conflicts: ['prefer-revision-older'], - implies: 'single', - }) - .option('prefer-revision-older', { - group: groupRomPriority, - description: 'Prefer older ROM revisions over newer', - type: 'boolean', - conflicts: ['prefer-revision-newer'], + description: 'Prefer older or newer revisions, versions, or ring codes', + choices: Object.keys(PreferRevision) + .filter((mode) => Number.isNaN(Number(mode))) + .map((mode) => mode.toLowerCase()), + coerce: ArgumentsParser.getLastValue, // don't allow string[] values + requiresArg: true, implies: 'single', }) .option('prefer-retail', { @@ -794,20 +746,6 @@ export default class ArgumentsParser { type: 'boolean', implies: 'single', }) - .option('prefer-ntsc', { - group: groupRomPriority, - description: 'Prefer NTSC ROMs over others', - type: 'boolean', - conflicts: 'prefer-pal', - implies: 'single', - }) - .option('prefer-pal', { - group: groupRomPriority, - description: 'Prefer PAL ROMs over others', - type: 'boolean', - conflicts: 'prefer-ntsc', - implies: 'single', - }) .option('prefer-parent', { group: groupRomPriority, description: 'Prefer parent ROMs over clones', @@ -871,12 +809,12 @@ export default class ArgumentsParser { }) .option('disable-cache', { group: groupHelpDebug, - description: 'Disable the file checksum cache', + description: 'Disable loading or saving the cache file', type: 'boolean', }) .option('cache-path', { group: groupHelpDebug, - description: 'Location for the 
file checksum cache file', + description: 'Location for the cache file', type: 'string', coerce: ArgumentsParser.getLastValue, // don't allow string[] values requiresArg: true, @@ -931,7 +869,7 @@ Advanced usage: {datDescription} The description of the DAT that contains the ROM {region} The region of the ROM release (e.g. "USA"), each ROM can have multiple {language} The language of the ROM release (e.g. "En"), each ROM can have multiple - {gameType} The type of the game (e.g. "Retail", "Demo", "Prototype") + {type} The type of the game (e.g. "Retail", "Demo", "Prototype") {genre} The DAT-defined genre of the game {inputDirname} The input file's dirname diff --git a/src/modules/candidateArchiveFileHasher.ts b/src/modules/candidateArchiveFileHasher.ts index 54a602243..8b90052ee 100644 --- a/src/modules/candidateArchiveFileHasher.ts +++ b/src/modules/candidateArchiveFileHasher.ts @@ -1,16 +1,11 @@ -import { Semaphore } from 'async-mutex'; - import ProgressBar, { ProgressBarSymbol } from '../console/progressBar.js'; -import ElasticSemaphore from '../elasticSemaphore.js'; -import Defaults from '../globals/defaults.js'; -import FsPoly from '../polyfill/fsPoly.js'; +import DriveSemaphore from '../driveSemaphore.js'; import DAT from '../types/dats/dat.js'; import Parent from '../types/dats/parent.js'; import ArchiveFile from '../types/files/archives/archiveFile.js'; import FileFactory from '../types/files/fileFactory.js'; import Options from '../types/options.js'; import ReleaseCandidate from '../types/releaseCandidate.js'; -import ROMWithFiles from '../types/romWithFiles.js'; import Module from './module.js'; /** @@ -19,23 +14,22 @@ import Module from './module.js'; * {@link CandidatePreferer}. 
*/ export default class CandidateArchiveFileHasher extends Module { - private static readonly THREAD_SEMAPHORE = new Semaphore(Number.MAX_SAFE_INTEGER); - - // WARN(cemmer): there is an undocumented semaphore max value that can be used, the full - // 4,700,372,992 bytes of a DVD+R will cause runExclusive() to never run or return. - private static readonly FILESIZE_SEMAPHORE = new ElasticSemaphore( - Defaults.MAX_READ_WRITE_CONCURRENT_KILOBYTES, + private static readonly DRIVE_SEMAPHORE = new DriveSemaphore( + Number.MAX_SAFE_INTEGER, ); private readonly options: Options; - constructor(options: Options, progressBar: ProgressBar) { + private readonly fileFactory: FileFactory; + + constructor(options: Options, progressBar: ProgressBar, fileFactory: FileFactory) { super(progressBar, CandidateArchiveFileHasher.name); this.options = options; + this.fileFactory = fileFactory; // This will be the same value globally, but we can't know the value at file import time - if (options.getReaderThreads() < CandidateArchiveFileHasher.THREAD_SEMAPHORE.getValue()) { - CandidateArchiveFileHasher.THREAD_SEMAPHORE.setValue(options.getReaderThreads()); + if (options.getReaderThreads() < CandidateArchiveFileHasher.DRIVE_SEMAPHORE.getValue()) { + CandidateArchiveFileHasher.DRIVE_SEMAPHORE.setValue(options.getReaderThreads()); } } @@ -67,8 +61,8 @@ export default class CandidateArchiveFileHasher extends Module { } this.progressBar.logTrace(`${dat.getNameShort()}: generating ${archiveFileCount.toLocaleString()} hashed ArchiveFile candidate${archiveFileCount !== 1 ? 
's' : ''}`); - await this.progressBar.setSymbol(ProgressBarSymbol.HASHING); - await this.progressBar.reset(archiveFileCount); + this.progressBar.setSymbol(ProgressBarSymbol.CANDIDATE_HASHING); + this.progressBar.reset(archiveFileCount); const hashedParentsToCandidates = this.hashArchiveFiles(dat, parentsToCandidates); @@ -91,45 +85,46 @@ export default class CandidateArchiveFileHasher extends Module { return romWithFiles; } - return CandidateArchiveFileHasher.THREAD_SEMAPHORE.runExclusive(async () => { - const totalKilobytes = await FsPoly.size(inputFile.getFilePath()) / 1024; - return CandidateArchiveFileHasher.FILESIZE_SEMAPHORE.runExclusive(async () => { - await this.progressBar.incrementProgress(); + const outputFile = romWithFiles.getOutputFile(); + if (inputFile.equals(outputFile)) { + // There's no need to calculate the checksum, {@link CandidateWriter} will skip + // writing over itself + return romWithFiles; + } + + return CandidateArchiveFileHasher.DRIVE_SEMAPHORE.runExclusive( + inputFile, + async () => { + this.progressBar.incrementProgress(); const waitingMessage = `${inputFile.toString()} ...`; this.progressBar.addWaitingMessage(waitingMessage); this.progressBar.logTrace(`${dat.getNameShort()}: ${parent.getName()}: calculating checksums for: ${inputFile.toString()}`); - const hashedInputFile = await FileFactory.archiveFileFrom( + const hashedInputFile = await this.fileFactory.archiveFileFrom( inputFile.getArchive(), inputFile.getChecksumBitmask(), ); // {@link CandidateGenerator} would have copied undefined values from the input // file, so we need to modify the expected output file as well for testing - const hashedOutputFile = romWithFiles.getOutputFile().withProps({ + const hashedOutputFile = outputFile.withProps({ size: hashedInputFile.getSize(), crc32: hashedInputFile.getCrc32(), md5: hashedInputFile.getMd5(), sha1: hashedInputFile.getSha1(), sha256: hashedInputFile.getSha256(), }); - const hashedRomWithFiles = new ROMWithFiles( - 
romWithFiles.getRom(), - hashedInputFile, - hashedOutputFile, - ); + const hashedRomWithFiles = romWithFiles + .withInputFile(hashedInputFile) + .withOutputFile(hashedOutputFile); this.progressBar.removeWaitingMessage(waitingMessage); - await this.progressBar.incrementDone(); + this.progressBar.incrementDone(); return hashedRomWithFiles; - }, totalKilobytes); - }); + }, + ); })); - return new ReleaseCandidate( - releaseCandidate.getGame(), - releaseCandidate.getRelease(), - hashedRomsWithFiles, - ); + return releaseCandidate.withRomsWithFiles(hashedRomsWithFiles); })); return [parent, hashedReleaseCandidates]; diff --git a/src/modules/candidateCombiner.ts b/src/modules/candidateCombiner.ts index d934e8919..8020a5f24 100644 --- a/src/modules/candidateCombiner.ts +++ b/src/modules/candidateCombiner.ts @@ -8,7 +8,6 @@ import ROM from '../types/dats/rom.js'; import ArchiveEntry from '../types/files/archives/archiveEntry.js'; import Options from '../types/options.js'; import ReleaseCandidate from '../types/releaseCandidate.js'; -import ROMWithFiles from '../types/romWithFiles.js'; import Module from './module.js'; /** @@ -26,10 +25,10 @@ export default class CandidateCombiner extends Module { /** * Combine the candidates. 
*/ - async combine( + combine( dat: DAT, parentsToCandidates: Map, - ): Promise> { + ): Map { if (!this.options.getZipDatName()) { return parentsToCandidates; } @@ -40,8 +39,8 @@ export default class CandidateCombiner extends Module { } this.progressBar.logTrace(`${dat.getNameShort()}: generating consolidated candidate`); - await this.progressBar.setSymbol(ProgressBarSymbol.COMBINING_ALL); - await this.progressBar.reset(parentsToCandidates.size); + this.progressBar.setSymbol(ProgressBarSymbol.CANDIDATE_COMBINING); + this.progressBar.reset(parentsToCandidates.size); const game = CandidateCombiner.buildGame(dat, parentsToCandidates); const parent = new Parent(game); @@ -104,11 +103,7 @@ export default class CandidateCombiner extends Module { )); } - return new ROMWithFiles( - romWithFiles.getRom(), - romWithFiles.getInputFile(), - outputEntry, - ); + return romWithFiles.withOutputFile(outputEntry); })); return new ReleaseCandidate(game, undefined, romsWithFiles); diff --git a/src/modules/candidateExtensionCorrector.ts b/src/modules/candidateExtensionCorrector.ts index e31193dd5..8b0cde374 100644 --- a/src/modules/candidateExtensionCorrector.ts +++ b/src/modules/candidateExtensionCorrector.ts @@ -7,7 +7,9 @@ import DAT from '../types/dats/dat.js'; import Parent from '../types/dats/parent.js'; import ROM from '../types/dats/rom.js'; import ArchiveEntry from '../types/files/archives/archiveEntry.js'; -import FileCache from '../types/files/fileCache.js'; +import Chd from '../types/files/archives/chd/chd.js'; +import FileFactory from '../types/files/fileFactory.js'; +import FileSignature from '../types/files/fileSignature.js'; import Options, { FixExtension } from '../types/options.js'; import OutputFactory from '../types/outputFactory.js'; import ReleaseCandidate from '../types/releaseCandidate.js'; @@ -24,9 +26,12 @@ export default class CandidateExtensionCorrector extends Module { private readonly options: Options; - constructor(options: Options, progressBar: 
ProgressBar) { + private readonly fileFactory: FileFactory; + + constructor(options: Options, progressBar: ProgressBar, fileFactory: FileFactory) { super(progressBar, CandidateExtensionCorrector.name); this.options = options; + this.fileFactory = fileFactory; // This will be the same value globally, but we can't know the value at file import time if (options.getReaderThreads() < CandidateExtensionCorrector.THREAD_SEMAPHORE.getValue()) { @@ -51,9 +56,14 @@ export default class CandidateExtensionCorrector extends Module { .flatMap((releaseCandidate) => releaseCandidate.getRomsWithFiles()) .filter((romWithFiles) => this.romNeedsCorrecting(romWithFiles)) .length; + if (romsThatNeedCorrecting === 0) { + this.progressBar.logTrace(`${dat.getNameShort()}: no output files need their extension corrected`); + return parentsToCandidates; + } + this.progressBar.logTrace(`${dat.getNameShort()}: correcting ${romsThatNeedCorrecting.toLocaleString()} output file extension${romsThatNeedCorrecting !== 1 ? 
's' : ''}`); - await this.progressBar.setSymbol(ProgressBarSymbol.EXTENSION_CORRECTION); - await this.progressBar.reset(romsThatNeedCorrecting); + this.progressBar.setSymbol(ProgressBarSymbol.CANDIDATE_EXTENSION_CORRECTION); + this.progressBar.reset(romsThatNeedCorrecting); const correctedParentsToCandidates = await this.correctExtensions(dat, parentsToCandidates); @@ -62,6 +72,16 @@ export default class CandidateExtensionCorrector extends Module { } private romNeedsCorrecting(romWithFiles: ROMWithFiles): boolean { + if (romWithFiles.getRom().getName().trim() === '') { + return true; + } + + const inputFile = romWithFiles.getInputFile(); + if (inputFile instanceof ArchiveEntry && inputFile.getArchive() instanceof Chd) { + // Files within CHDs never need extension correction + return false; + } + return this.options.getFixExtension() === FixExtension.ALWAYS || (this.options.getFixExtension() === FixExtension.AUTO && ( !this.options.usingDats() @@ -102,18 +122,12 @@ export default class CandidateExtensionCorrector extends Module { .withEntryPath(correctedOutputPath.entryPath); } - return new ROMWithFiles( - correctedRom, - romWithFiles.getInputFile(), - correctedOutputFile, - ); + return romWithFiles + .withRom(correctedRom) + .withOutputFile(correctedOutputFile); })); - return new ReleaseCandidate( - releaseCandidate.getGame(), - releaseCandidate.getRelease(), - hashedRomsWithFiles, - ); + return releaseCandidate.withRomsWithFiles(hashedRomsWithFiles); })); return [parent, hashedReleaseCandidates]; @@ -142,13 +156,18 @@ export default class CandidateExtensionCorrector extends Module { } await CandidateExtensionCorrector.THREAD_SEMAPHORE.runExclusive(async () => { - await this.progressBar.incrementProgress(); + this.progressBar.incrementProgress(); const waitingMessage = `${releaseCandidate.getName()} ...`; this.progressBar.addWaitingMessage(waitingMessage); this.progressBar.logTrace(`${dat.getNameShort()}: ${parent.getName()}: correcting extension for: 
${romWithFiles.getInputFile() .toString()}`); - const romSignature = await FileCache.getOrComputeFileSignature(romWithFiles.getInputFile()); + let romSignature: FileSignature | undefined; + try { + romSignature = await this.fileFactory.signatureFrom(romWithFiles.getInputFile()); + } catch (error) { + this.progressBar.logError(`${dat.getNameShort()}: failed to correct file extension for '${romWithFiles.getInputFile()}': ${error}`); + } if (romSignature) { // ROM file signature found, use the appropriate extension const { dir, name } = path.parse(correctedRom.getName()); @@ -160,7 +179,7 @@ export default class CandidateExtensionCorrector extends Module { } this.progressBar.removeWaitingMessage(waitingMessage); - await this.progressBar.incrementDone(); + this.progressBar.incrementDone(); }); return correctedRom; diff --git a/src/modules/candidateGenerator.ts b/src/modules/candidateGenerator.ts index 1ccf51b0f..478485e6e 100644 --- a/src/modules/candidateGenerator.ts +++ b/src/modules/candidateGenerator.ts @@ -13,6 +13,7 @@ import ROM from '../types/dats/rom.js'; import Archive from '../types/files/archives/archive.js'; import ArchiveEntry from '../types/files/archives/archiveEntry.js'; import ArchiveFile from '../types/files/archives/archiveFile.js'; +import Chd from '../types/files/archives/chd/chd.js'; import Zip from '../types/files/archives/zip.js'; import File from '../types/files/file.js'; import IndexedFiles from '../types/indexedFiles.js'; @@ -57,14 +58,14 @@ export default class CandidateGenerator extends Module { const parents = dat.getParents(); this.progressBar.logTrace(`${dat.getNameShort()}: generating candidates`); - await this.progressBar.setSymbol(ProgressBarSymbol.GENERATING); - await this.progressBar.reset(parents.length); + this.progressBar.setSymbol(ProgressBarSymbol.CANDIDATE_GENERATING); + this.progressBar.reset(parents.length); // For each parent, try to generate a parent candidate await Promise.all(parents.map(async ( parent, ) => 
CandidateGenerator.THREAD_SEMAPHORE.runExclusive(async () => { - await this.progressBar.incrementProgress(); + this.progressBar.incrementProgress(); const waitingMessage = `${parent.getName()} ...`; this.progressBar.addWaitingMessage(waitingMessage); @@ -97,7 +98,7 @@ export default class CandidateGenerator extends Module { output.set(parent, releaseCandidates); this.progressBar.removeWaitingMessage(waitingMessage); - await this.progressBar.incrementDone(); + this.progressBar.incrementDone(); }))); const size = [...output.values()] @@ -119,13 +120,19 @@ export default class CandidateGenerator extends Module { ): Promise { const romsToInputFiles = this.getInputFilesForGame(dat, game, indexedFiles); + const gameRoms = [ + ...game.getRoms(), + ...(this.options.getExcludeDisks() ? [] : game.getDisks()), + ]; + // For each Game's ROM, find the matching File const romFiles = await Promise.all( - game.getRoms().map(async (rom) => { - if (!romsToInputFiles.has(rom)) { + gameRoms.map(async (rom) => { + let inputFile = romsToInputFiles.get(rom); + if (inputFile === undefined) { return [rom, undefined]; } - let inputFile = romsToInputFiles.get(rom) as File; + /** * WARN(cemmer): {@link inputFile} may not be an exact match for {@link rom}. 
There are two * situations we can be in: @@ -147,10 +154,7 @@ export default class CandidateGenerator extends Module { || (inputFile.getSha1() !== undefined && inputFile.getSha1() === rom.getSha1()) || (inputFile.getSha256() !== undefined && inputFile.getSha256() === rom.getSha256())) // ...and we shouldn't remove the header - && !this.options.canRemoveHeader( - dat, - path.extname(inputFile.getExtractedFilePath()), - ) + && !this.options.canRemoveHeader(path.extname(inputFile.getExtractedFilePath())) ) { // ...then forget the input file's header, so that we don't later remove it this.progressBar.logTrace(`${dat.getNameShort()}: ${game.getName()}: not removing header, ignoring that one was found for: ${inputFile.toString()}`); @@ -173,20 +177,21 @@ export default class CandidateGenerator extends Module { } /** - * If the matched input file is from an archive, and we're not zipping or extracting, then - * treat the file as "raw" so it can be copied/moved as-is. - * Matches {@link ROMHeaderProcessor.getFileWithHeader} + * If the matched input file is from an archive, and we can raw-copy that entire archive, + * then treat the file as "raw" so it can be copied/moved as-is. 
*/ if (inputFile instanceof ArchiveEntry - && !this.options.shouldZipFile(rom.getName()) - && !this.options.shouldExtract() + && this.shouldGenerateArchiveFile(dat, game, release, rom, romsToInputFiles) ) { try { - // Note: we're delaying checksum calculation for now, {@link CandidateArchiveFileHasher} - // will handle it later + // Note: we're delaying checksum calculations for now, + // {@link CandidateArchiveFileHasher} will handle it later inputFile = new ArchiveFile( inputFile.getArchive(), - { checksumBitmask: inputFile.getChecksumBitmask() }, + { + size: await fsPoly.size(inputFile.getFilePath()), + checksumBitmask: inputFile.getChecksumBitmask(), + }, ); } catch (error) { this.progressBar.logWarn(`${dat.getNameShort()}: ${game.getName()}: ${error}`); @@ -210,16 +215,17 @@ export default class CandidateGenerator extends Module { const foundRomsWithFiles = romFiles .map(([, romWithFiles]) => romWithFiles) - .filter(ArrayPoly.filterNotNullish); + .filter((romWithFiles) => romWithFiles !== undefined); if (romFiles.length > 0 && foundRomsWithFiles.length === 0) { // The Game has ROMs, but none were found return undefined; } - // Ignore the Game if not every File is present const missingRoms = romFiles .filter(([, romWithFiles]) => !romWithFiles) .map(([rom]) => rom); + + // Ignore the Game if not every File is present if (missingRoms.length > 0 && !this.options.getAllowIncompleteSets()) { if (foundRomsWithFiles.length > 0) { this.logMissingRomFiles(dat, game, release, foundRomsWithFiles, missingRoms); @@ -232,6 +238,15 @@ export default class CandidateGenerator extends Module { return undefined; } + // If the found files have excess and we aren't allowing it, then return no candidate + if (!this.options.shouldZip() + && !this.options.shouldExtract() + && !this.options.getAllowExcessSets() + && this.hasExcessFiles(dat, game, foundRomsWithFiles, indexedFiles) + ) { + return undefined; + } + return new ReleaseCandidate(game, release, foundRomsWithFiles); } @@ 
-240,7 +255,12 @@ export default class CandidateGenerator extends Module { game: Game, indexedFiles: IndexedFiles, ): Map { - const romsAndInputFiles = game.getRoms().map((rom) => ([ + const gameRoms = [ + ...game.getRoms(), + ...(this.options.getExcludeDisks() ? [] : game.getDisks()), + ]; + + const romsAndInputFiles = gameRoms.map((rom) => ([ rom, indexedFiles.findFiles(rom) ?? [], ])) satisfies [ROM, File[]][]; @@ -259,30 +279,69 @@ export default class CandidateGenerator extends Module { // Group this Game's ROMs by the input Archives that contain them const inputArchivesToRoms = romsAndInputFiles.reduce((map, [rom, files]) => { files - .filter((file) => file instanceof ArchiveEntry) - .map((file): Archive => (file as ArchiveEntry).getArchive()) + .filter((file): file is ArchiveEntry => file instanceof ArchiveEntry) + .map((archive): Archive => archive.getArchive()) .forEach((archive) => { - const roms = map.get(archive) ?? []; - roms.push(rom); // We need to filter out duplicate ROMs because of Games that contain duplicate ROMs, e.g. // optical media games that have the same track multiple times. 
- const uniqueRoms = roms.reduce(ArrayPoly.reduceUnique(), []); - map.set(archive, uniqueRoms); + if (!map.has(archive)) { + map.set(archive, new Set()); + } + map.get(archive)?.add(rom); }); return map; - }, new Map()); + }, new Map>()); // Filter to the Archives that contain every ROM in this Game const archivesWithEveryRom = [...inputArchivesToRoms.entries()] - .filter(([, roms]) => roms.length === game.getRoms().length) + .filter(([inputArchive, roms]) => { + if ([...roms].map((rom) => rom.hashCode()).join(',') === gameRoms.map((rom) => rom.hashCode()).join(',')) { + return true; + } + // If there is a CHD with every .bin file, and we're raw-copying it, then assume its .cue + // file is accurate + return inputArchive instanceof Chd + && !gameRoms.some((rom) => this.options.shouldZipRom(rom)) + && !gameRoms.some((rom) => this.options.shouldExtractRom(rom)) + && CandidateGenerator.onlyCueFilesMissingFromChd(game, [...roms]); + }) .map(([archive]) => archive); - const archiveWithEveryRom = archivesWithEveryRom.at(0); + + const filesByPath = indexedFiles.getFilesByFilePath(); + const filteredArchivesWithEveryRom = archivesWithEveryRom + .sort((a, b) => { + // First, prefer the archive with the least number of entries + const aEntries = filesByPath.get(a.getFilePath())?.length ?? 0; + const bEntries = filesByPath.get(b.getFilePath())?.length ?? 0; + if (aEntries !== bEntries) { + return aEntries - bEntries; + } + + // Then, prefer archives whose filename contains the game name + const aGameName = path.basename(a.getFilePath()).includes(game.getName()) ? 1 : 0; + const bGameName = path.basename(b.getFilePath()).includes(game.getName()) ? 
1 : 0; + return aGameName - bGameName; + }) + // Filter out Archives with excess entries + .filter((archive) => { + const unusedEntryPaths = this.findArchiveUnusedEntryPaths( + archive, + romsAndInputFiles.flatMap(([, inputFiles]) => inputFiles), + indexedFiles, + ); + if (unusedEntryPaths.length > 0) { + this.progressBar.logTrace(`${dat.getNameShort()}: ${game.getName()}: not preferring archive that contains every ROM, plus the excess entries:\n${unusedEntryPaths.map((entryPath) => ` ${entryPath}`).join('\n')}`); + } + return unusedEntryPaths.length === 0; + }); + + const archiveWithEveryRom = filteredArchivesWithEveryRom.at(0); // Do nothing if any of... if ( // The Game has zero or one ROM, therefore, we don't really care where the file comes from, // and we should respect any previous sorting of the input files - game.getRoms().length <= 1 + gameRoms.length <= 1 // No input archive contains every ROM from this Game || archiveWithEveryRom === undefined // We're extracting files, therefore, we don't really care where the file comes from, and we @@ -298,13 +357,75 @@ export default class CandidateGenerator extends Module { // For each of this Game's ROMs, find the matching ArchiveEntry from this Archive return new Map(romsAndInputFiles.map(([rom, inputFiles]) => { this.progressBar.logTrace(`${dat.getNameShort()}: ${game.getName()}: preferring input archive that contains every ROM: ${archiveWithEveryRom.getFilePath()}`); - const archiveEntry = inputFiles.find(( - inputFile, - ) => inputFile.getFilePath() === archiveWithEveryRom.getFilePath()) as File; - return [rom, archiveEntry]; + let archiveEntry = inputFiles + .find((inputFile) => inputFile.getFilePath() === archiveWithEveryRom.getFilePath()); + + if (!archiveEntry + && rom.getName().toLowerCase().endsWith('.cue') + && archiveWithEveryRom instanceof Chd + ) { + // We assumed this CHD was fine above, find its .cue file + archiveEntry = filesByPath.get(archiveWithEveryRom.getFilePath()) + ?.find((file) => 
file.getExtractedFilePath().toLowerCase().endsWith('.cue')); + } + + return [rom, archiveEntry as File]; })); } + private shouldGenerateArchiveFile( + dat: DAT, + game: Game, + release: Release | undefined, + rom: ROM, + romsToInputFiles: Map, + ): boolean { + if ([...romsToInputFiles.values()].some((inputFile) => inputFile.getFileHeader() !== undefined + || inputFile.getPatch() !== undefined) + ) { + // At least one output file won't exactly match its input file, don't generate an archive + // file + return false; + } + + if (romsToInputFiles.get(rom) instanceof ArchiveEntry + && !this.options.shouldZipRom(rom) + && !this.options.shouldExtractRom(rom) + ) { + // This ROM's input file is already archived, and we're not [re-]zipping or extracting, so + // we want to leave it as-is. We'll check later if the input archive has excess files. + return true; + } + + // TODO(cemmer): delay this until after CandidatePatchGenerator + if (this.options.getPatchFileCount() === 0 + && !this.options.getZipDatName() + && [...romsToInputFiles.entries()] + .every(([inputRom, inputFile]) => inputFile instanceof ArchiveEntry + && inputFile.getArchive() instanceof Zip + && this.options.shouldZipRom(inputRom) + && OutputFactory.getPath( + this.options, + dat, + game, + release, + inputRom, + inputFile, + ).entryPath === inputFile.getExtractedFilePath()) + && [...romsToInputFiles.values()] + .map((inputFile) => inputFile.getFilePath()) + .reduce(ArrayPoly.reduceUnique(), []) + .length === 1 + ) { + // Every ROM should be zipped, and every input file is already in the same zip, and the + // archive entry paths match, so it's safe to copy the zip as-is + return true; + } + + // Return false by default + return false; + } + private async getOutputFile( dat: DAT, game: Game, @@ -344,7 +465,7 @@ export default class CandidateGenerator extends Module { } // Determine the output file type - if (this.options.shouldZipFile(rom.getName())) { + if (this.options.shouldZipRom(rom) && !(inputFile 
instanceof ArchiveFile)) { // Should zip, return an archive entry within an output zip return ArchiveEntry.entryOf({ archive: new Zip(outputFilePath), @@ -425,4 +546,102 @@ export default class CandidateGenerator extends Module { } return hasConflict; } + + /** + * Given a {@link Game}, return true if all conditions are met: + * - The {@link Game} only has .bin and .cue files + * - Out of the {@link ROM}s that were found in an input directory for the {@link Game}, every + * .bin was found but at least one .cue file is missing + * This is only relevant when we are raw-copying CHD files, where it is difficult to ensure that + * the .cue file is accurate. + */ + private static onlyCueFilesMissingFromChd( + game: Game, + foundRoms: ROM[], + ): boolean { + // Only games with only bin/cue files can have only a cue file missing + const allCueBin = game.getRoms() + .flat() + .every((rom) => ['.bin', '.cue'].includes(path.extname(rom.getName()).toLowerCase())); + if (foundRoms.length === 0 || !allCueBin) { + return false; + } + + const foundRomNames = new Set(foundRoms.map((rom) => rom.getName())); + const missingCueRoms = game.getRoms() + .filter((rom) => !foundRomNames.has(rom.getName())) + .filter((rom) => path.extname(rom.getName()).toLowerCase() === '.cue'); + const missingNonCueRoms = game.getRoms() + .filter((rom) => !foundRomNames.has(rom.getName())) + .filter((rom) => path.extname(rom.getName()).toLowerCase() !== '.cue'); + return missingCueRoms.length > 0 && missingNonCueRoms.length === 0; + } + + private hasExcessFiles( + dat: DAT, + game: Game, + romsWithFiles: ROMWithFiles[], + indexedFiles: IndexedFiles, + ): boolean { + // For this Game, find every input file that is an ArchiveEntry + const inputArchiveEntries = romsWithFiles + // We need to rehydrate information from IndexedFiles because raw-copying/moving archives + // would have lost this information + .map((romWithFiles) => { + const inputFile = romWithFiles.getInputFile(); + return 
indexedFiles.findFiles(romWithFiles.getRom()) + ?.find((foundFile) => foundFile.getFilePath() === inputFile.getFilePath()); + }) + .filter((inputFile) => inputFile instanceof ArchiveEntry || inputFile instanceof ArchiveFile); + // ...then translate those ArchiveEntries into a list of unique Archives + const inputArchives = inputArchiveEntries + .map((archiveEntry) => archiveEntry.getArchive()) + .filter(ArrayPoly.filterUniqueMapped((archive) => archive.getFilePath())); + + for (const inputArchive of inputArchives) { + const unusedEntryPaths = this.findArchiveUnusedEntryPaths( + inputArchive, + inputArchiveEntries, + indexedFiles, + ); + if (unusedEntryPaths.length > 0) { + this.progressBar.logTrace(`${dat.getNameShort()}: ${game.getName()}: cannot use '${inputArchive.getFilePath()}' as an input file, it has the excess entries:\n${unusedEntryPaths.map((entryPath) => ` ${entryPath}`).join('\n')}`); + return true; + } + } + + return false; + } + + /** + * Given an input {@link archive} and a set of {@link inputFiles} that match to a {@link ROM} from + * a {@link Game}, determine if every entry from the {@link archive} was matched. 
+ */ + private findArchiveUnusedEntryPaths( + archive: Archive, + inputFiles: File[], + indexedFiles: IndexedFiles, + ): ArchiveEntry[] { + if (this.options.shouldZip() + || this.options.shouldExtract() + || this.options.getAllowExcessSets() + ) { + // We don't particularly care where input files come from + return []; + } + + // Find the Archive's entries (all of them, not just ones that match ROMs in this Game) + // NOTE(cemmer): we need to use hashCode() because a Game may have duplicate ROMs that all got + // matched to the same input file, so not every archive entry may be in {@link inputFiles} + const archiveEntryHashCodes = new Set(inputFiles + .filter((entry) => entry.getFilePath() === archive.getFilePath()) + .filter((file): file is ArchiveEntry => file instanceof ArchiveEntry) + .map((entry) => entry.hashCode())); + + // Find which of the Archive's entries didn't match to a ROM from this Game + return (indexedFiles.getFilesByFilePath().get(archive.getFilePath()) ?? []) + .filter((file): file is ArchiveEntry => file instanceof ArchiveEntry) + .filter((file) => !(archive instanceof Chd) || !file.getExtractedFilePath().toLowerCase().endsWith('.cue')) + .filter((entry) => !archiveEntryHashCodes.has(entry.hashCode())); + } } diff --git a/src/modules/candidateMergeSplitValidator.ts b/src/modules/candidateMergeSplitValidator.ts index 475766d5a..ba75642d2 100644 --- a/src/modules/candidateMergeSplitValidator.ts +++ b/src/modules/candidateMergeSplitValidator.ts @@ -23,18 +23,18 @@ export default class CandidateMergeSplitValidator extends Module { /** * Validate the {@link ReleaseCandidate}s. 
*/ - async validate( + validate( dat: DAT, parentsToCandidates: Map, - ): Promise { + ): string[] { if (parentsToCandidates.size === 0) { this.progressBar.logTrace(`${dat.getNameShort()}: no parents to validate merged & split ROM sets for`); return []; } this.progressBar.logTrace(`${dat.getNameShort()}: validating merged & split ROM sets`); - await this.progressBar.setSymbol(ProgressBarSymbol.VALIDATING); - await this.progressBar.reset(parentsToCandidates.size); + this.progressBar.setSymbol(ProgressBarSymbol.CANDIDATE_VALIDATING); + this.progressBar.reset(parentsToCandidates.size); const datGamesIndexed = dat.getGames().reduce((map, game) => { map.set(game.getName(), game); @@ -72,7 +72,7 @@ export default class CandidateMergeSplitValidator extends Module { ) { const missingDeviceGames = game.getDeviceRefs() .map((deviceRef) => datGamesIndexed.get(deviceRef.getName())) - .filter(ArrayPoly.filterNotNullish) + .filter((deviceGame) => deviceGame !== undefined) // Dependent device has ROM files .filter((deviceGame) => deviceGame.getRoms().length) .map((deviceGame) => { @@ -83,7 +83,7 @@ export default class CandidateMergeSplitValidator extends Module { } return deviceGame.getName(); }) - .filter(ArrayPoly.filterNotNullish) + .filter((deviceGameName) => deviceGameName !== undefined) .sort(); missingDependencies = [...missingDependencies, ...missingDeviceGames]; } diff --git a/src/modules/candidatePatchGenerator.ts b/src/modules/candidatePatchGenerator.ts index 9c3fd591d..439d63ac6 100644 --- a/src/modules/candidatePatchGenerator.ts +++ b/src/modules/candidatePatchGenerator.ts @@ -1,7 +1,6 @@ import path from 'node:path'; import ProgressBar, { ProgressBarSymbol } from '../console/progressBar.js'; -import ArrayPoly from '../polyfill/arrayPoly.js'; import DAT from '../types/dats/dat.js'; import Game from '../types/dats/game.js'; import Parent from '../types/dats/parent.js'; @@ -37,8 +36,8 @@ export default class CandidatePatchGenerator extends Module { } 
this.progressBar.logTrace(`${dat.getNameShort()}: generating patched candidates`); - await this.progressBar.setSymbol(ProgressBarSymbol.GENERATING); - await this.progressBar.reset(parentsToCandidates.size); + this.progressBar.setSymbol(ProgressBarSymbol.CANDIDATE_GENERATING); + this.progressBar.reset(parentsToCandidates.size); const crcToPatches = CandidatePatchGenerator.indexPatchesByCrcBefore(patches); this.progressBar.logTrace(`${dat.getNameShort()}: ${crcToPatches.size} unique patch${crcToPatches.size !== 1 ? 'es' : ''} found`); @@ -51,10 +50,12 @@ export default class CandidatePatchGenerator extends Module { private static indexPatchesByCrcBefore(patches: Patch[]): Map { return patches.reduce((map, patch) => { - map.set(patch.getCrcBefore(), [ - ...(map.get(patch.getCrcBefore()) ?? []), - patch, - ]); + const key = patch.getCrcBefore(); + if (!map.has(key)) { + map.set(key, [patch]); + } else { + map.get(key)?.push(patch); + } return map; }, new Map()); } @@ -110,7 +111,7 @@ export default class CandidatePatchGenerator extends Module { .flatMap((romWithFiles) => romWithFiles.getInputFile()) .filter((inputFile) => inputFile.getCrc32() !== undefined) .flatMap((inputFile) => crcToPatches.get(inputFile.getCrc32() as string)) - .filter(ArrayPoly.filterNotNullish); + .filter((patch) => patch !== undefined); // No relevant patches found, no new candidates generated if (releaseCandidatePatches.length === 0) { diff --git a/src/modules/candidatePostProcessor.ts b/src/modules/candidatePostProcessor.ts index cb3bd9e25..3a8576892 100644 --- a/src/modules/candidatePostProcessor.ts +++ b/src/modules/candidatePostProcessor.ts @@ -22,18 +22,18 @@ export default class CandidatePostProcessor extends Module { /** * Post-process the candidates. 
*/ - async process( + process( dat: DAT, parentsToCandidates: Map, - ): Promise> { + ): Map { if (parentsToCandidates.size === 0) { this.progressBar.logTrace(`${dat.getNameShort()}: no parents, so no candidates to process`); return parentsToCandidates; } this.progressBar.logTrace(`${dat.getNameShort()}: processing candidates`); - await this.progressBar.setSymbol(ProgressBarSymbol.GENERATING); - await this.progressBar.reset(parentsToCandidates.size); + this.progressBar.setSymbol(ProgressBarSymbol.CANDIDATE_GENERATING); + this.progressBar.reset(parentsToCandidates.size); // Get the output basename of every ROM const outputFileBasenames = [...parentsToCandidates.values()] @@ -78,11 +78,7 @@ export default class CandidatePostProcessor extends Module { outputFileBasenames, ); - return new ReleaseCandidate( - releaseCandidate.getGame(), - releaseCandidate.getRelease(), - newRomsWithFiles, - ); + return releaseCandidate.withRomsWithFiles(newRomsWithFiles); } private mapRomsWithFiles( @@ -106,11 +102,7 @@ export default class CandidatePostProcessor extends Module { } const newOutputFile = romWithFiles.getOutputFile().withFilePath(newOutputPath); - return new ROMWithFiles( - romWithFiles.getRom(), - romWithFiles.getInputFile(), - newOutputFile, - ); + return romWithFiles.withOutputFile(newOutputFile); }); } } diff --git a/src/modules/candidatePreferer.ts b/src/modules/candidatePreferer.ts index 4a5bfb44d..36f6ea989 100644 --- a/src/modules/candidatePreferer.ts +++ b/src/modules/candidatePreferer.ts @@ -2,7 +2,7 @@ import ProgressBar, { ProgressBarSymbol } from '../console/progressBar.js'; import fsPoly from '../polyfill/fsPoly.js'; import DAT from '../types/dats/dat.js'; import Parent from '../types/dats/parent.js'; -import Options from '../types/options.js'; +import Options, { PreferRevision } from '../types/options.js'; import ReleaseCandidate from '../types/releaseCandidate.js'; import Module from './module.js'; @@ -21,10 +21,10 @@ export default class CandidatePreferer 
extends Module { /** * Prefer some candidates. */ - async prefer( + prefer( dat: DAT, parentsToCandidates: Map, - ): Promise> { + ): Map { this.progressBar.logTrace(`${dat.getNameShort()}: preferring candidates`); if (parentsToCandidates.size === 0) { @@ -45,10 +45,10 @@ export default class CandidatePreferer extends Module { return parentsToCandidates; } - await this.progressBar.setSymbol(ProgressBarSymbol.FILTERING); - await this.progressBar.reset(parentsToCandidates.size); + this.progressBar.setSymbol(ProgressBarSymbol.CANDIDATE_FILTERING); + this.progressBar.reset(parentsToCandidates.size); - const output = await this.sortAndFilter(dat, parentsToCandidates); + const output = this.sortAndFilter(dat, parentsToCandidates); const size = [...output.values()] .flat() @@ -61,15 +61,15 @@ export default class CandidatePreferer extends Module { return output; } - private async sortAndFilter( + private sortAndFilter( dat: DAT, parentsToCandidates: Map, - ): Promise> { + ): Map { const output = new Map(); for (let i = 0; i < [...parentsToCandidates.entries()].length; i += 1) { const [parent, releaseCandidates] = [...parentsToCandidates.entries()][i]; - await this.progressBar.incrementProgress(); + this.progressBar.incrementProgress(); if (releaseCandidates.length > 1) { // Reduce log spam by only logging parents that can be changed this.progressBar.logTrace(`${dat.getNameShort()}: ${parent.getName()} (parent): ${releaseCandidates.length.toLocaleString()} candidate${releaseCandidates.length !== 1 ? 
's' : ''} before filtering`); @@ -86,7 +86,7 @@ export default class CandidatePreferer extends Module { output.set(parent, []); } - await this.progressBar.incrementDone(); + this.progressBar.incrementDone(); } return output; @@ -109,8 +109,6 @@ export default class CandidatePreferer extends Module { || this.preferRegionsSort(a, b) || this.preferRevisionSort(a, b) || this.preferRetailSort(a, b) - || this.preferNTSCSort(a, b) - || this.preferPALSort(a, b) || this.preferParentSort(a, b); } @@ -190,9 +188,9 @@ export default class CandidatePreferer extends Module { } private preferRevisionSort(a: ReleaseCandidate, b: ReleaseCandidate): number { - if (this.options.getPreferRevisionNewer()) { + if (this.options.getPreferRevision() === PreferRevision.NEWER) { return b.getGame().getRevision() - a.getGame().getRevision(); - } if (this.options.getPreferRevisionOlder()) { + } if (this.options.getPreferRevision() === PreferRevision.OLDER) { return a.getGame().getRevision() - b.getGame().getRevision(); } return 0; @@ -205,20 +203,6 @@ export default class CandidatePreferer extends Module { return (a.getGame().isRetail() ? 0 : 1) - (b.getGame().isRetail() ? 0 : 1); } - private preferNTSCSort(a: ReleaseCandidate, b: ReleaseCandidate): number { - if (!this.options.getPreferNTSC()) { - return 0; - } - return (a.getGame().isNTSC() ? 0 : 1) - (b.getGame().isNTSC() ? 0 : 1); - } - - private preferPALSort(a: ReleaseCandidate, b: ReleaseCandidate): number { - if (!this.options.getPreferPAL()) { - return 0; - } - return (a.getGame().isPAL() ? 0 : 1) - (b.getGame().isPAL() ? 
0 : 1); - } - private preferParentSort(a: ReleaseCandidate, b: ReleaseCandidate): number { if (!this.options.getPreferParent()) { return 0; diff --git a/src/modules/candidateValidator.ts b/src/modules/candidateValidator.ts index 39f07a9ee..bf77185f4 100644 --- a/src/modules/candidateValidator.ts +++ b/src/modules/candidateValidator.ts @@ -16,18 +16,18 @@ export default class CandidateValidator extends Module { /** * Validate the {@link ReleaseCandidate}s. */ - async validate( + validate( dat: DAT, parentsToCandidates: Map, - ): Promise { + ): ReleaseCandidate[] { if (parentsToCandidates.size === 0) { this.progressBar.logTrace(`${dat.getNameShort()}: no parents to validate candidates for`); return []; } this.progressBar.logTrace(`${dat.getNameShort()}: validating candidates`); - await this.progressBar.setSymbol(ProgressBarSymbol.VALIDATING); - await this.progressBar.reset(parentsToCandidates.size); + this.progressBar.setSymbol(ProgressBarSymbol.CANDIDATE_VALIDATING); + this.progressBar.reset(parentsToCandidates.size); const conflictedOutputPaths = this.validateUniqueOutputPaths(dat, parentsToCandidates); if (conflictedOutputPaths.length > 0) { @@ -47,7 +47,11 @@ export default class CandidateValidator extends Module { .reduce((map, releaseCandidate) => { releaseCandidate.getRomsWithFiles().forEach((romWithFiles) => { const key = romWithFiles.getOutputFile().getFilePath(); - map.set(key, [...(map.get(key) ?? 
[]), releaseCandidate]); + if (!map.has(key)) { + map.set(key, [releaseCandidate]); + } else { + map.get(key)?.push(releaseCandidate); + } }); return map; }, new Map()); diff --git a/src/modules/candidateWriter.ts b/src/modules/candidateWriter.ts index cb11e8475..1bceec2dc 100644 --- a/src/modules/candidateWriter.ts +++ b/src/modules/candidateWriter.ts @@ -5,6 +5,7 @@ import { Semaphore } from 'async-mutex'; import ProgressBar, { ProgressBarSymbol } from '../console/progressBar.js'; import ElasticSemaphore from '../elasticSemaphore.js'; import Defaults from '../globals/defaults.js'; +import KeyedMutex from '../keyedMutex.js'; import ArrayPoly from '../polyfill/arrayPoly.js'; import fsPoly from '../polyfill/fsPoly.js'; import DAT from '../types/dats/dat.js'; @@ -26,6 +27,7 @@ export interface CandidateWriterResults { * Copy or move output ROM files, if applicable. */ export default class CandidateWriter extends Module { + // The maximum number of candidates that can be written at once private static readonly THREAD_SEMAPHORE = new Semaphore(Number.MAX_SAFE_INTEGER); // WARN(cemmer): there is an undocumented semaphore max value that can be used, the full @@ -34,6 +36,12 @@ export default class CandidateWriter extends Module { Defaults.MAX_READ_WRITE_CONCURRENT_KILOBYTES, ); + // When moving input files, process input file paths exclusively + private static readonly MOVE_MUTEX = new KeyedMutex(1000); + + // When moving input files, keep track of files that have been moved + private static readonly FILE_PATH_MOVES = new Map(); + private readonly options: Options; private readonly filesQueuedForDeletion: File[] = []; @@ -88,15 +96,19 @@ export default class CandidateWriter extends Module { const totalCandidateCount = [...parentsToWritableCandidates.values()].flat().length; this.progressBar.logTrace(`${dat.getNameShort()}: writing ${totalCandidateCount.toLocaleString()} candidate${totalCandidateCount !== 1 ? 
's' : ''}`); - await this.progressBar.setSymbol(ProgressBarSymbol.WRITING); - await this.progressBar.reset(parentsToWritableCandidates.size); + if (this.options.shouldTest() && !this.options.getOverwrite()) { + this.progressBar.setSymbol(ProgressBarSymbol.TESTING); + } else { + this.progressBar.setSymbol(ProgressBarSymbol.WRITING); + } + this.progressBar.reset(parentsToWritableCandidates.size); await Promise.all([...parentsToWritableCandidates.entries()].map( async ([ parent, releaseCandidates, ]) => CandidateWriter.THREAD_SEMAPHORE.runExclusive(async () => { - await this.progressBar.incrementProgress(); + this.progressBar.incrementProgress(); this.progressBar.logTrace(`${dat.getNameShort()}: ${parent.getName()} (parent): writing ${releaseCandidates.length.toLocaleString()} candidate${releaseCandidates.length !== 1 ? 's' : ''}`); for (const releaseCandidate of releaseCandidates) { @@ -104,7 +116,7 @@ export default class CandidateWriter extends Module { } this.progressBar.logTrace(`${dat.getNameShort()}: ${parent.getName()} (parent): done writing ${releaseCandidates.length.toLocaleString()} candidate${releaseCandidates.length !== 1 ? 
's' : ''}`); - await this.progressBar.incrementDone(); + this.progressBar.incrementDone(); }), )); @@ -201,6 +213,7 @@ export default class CandidateWriter extends Module { } } + this.progressBar.setSymbol(ProgressBarSymbol.WRITING); let written = false; for (let i = 0; i <= this.options.getWriteRetry(); i += 1) { written = await this.writeZipFile( @@ -286,11 +299,6 @@ export default class CandidateWriter extends Module { } // Check checksum - if (expectedFile.getCrc32() === '00000000') { - this.progressBar.logWarn(`${dat.getNameShort()}: ${releaseCandidate.getName()}: ${expectedFile.toString()}: can't test, expected CRC is unknown`); - // eslint-disable-next-line no-continue - continue; - } const actualFile = actualEntriesByPath.get(entryPath) as ArchiveEntry; if (actualFile.getSha256() && expectedFile.getSha256() @@ -312,6 +320,7 @@ export default class CandidateWriter extends Module { } if (actualFile.getCrc32() && expectedFile.getCrc32() + && expectedFile.getCrc32() !== '00000000' && actualFile.getCrc32() !== expectedFile.getCrc32() ) { return `has the CRC32 ${actualFile.getCrc32()}, expected ${expectedFile.getCrc32()}`; @@ -340,7 +349,7 @@ export default class CandidateWriter extends Module { outputZip: Zip, inputToOutputZipEntries: [File, ArchiveEntry][], ): Promise { - this.progressBar.logInfo(`${dat.getNameShort()}: ${releaseCandidate.getName()}: creating zip archive '${outputZip.getFilePath()}' with the entries:\n${inputToOutputZipEntries.map(([input, output]) => ` '${input.toString()}' (${fsPoly.sizeReadable(input.getSize())}) -> '${output.getEntryPath()}'`).join('\n')}`); + this.progressBar.logInfo(`${dat.getNameShort()}: ${releaseCandidate.getName()}: creating zip archive '${outputZip.getFilePath()}' with the entries:\n${inputToOutputZipEntries.map(([input, output]) => ` '${input.toString()}' (${fsPoly.sizeReadable(input.getSize())}) โ†’ '${output.getEntryPath()}'`).join('\n')}`); try { await 
CandidateWriter.ensureOutputDirExists(outputZip.getFilePath()); @@ -388,10 +397,12 @@ export default class CandidateWriter extends Module { // the same input archive at the same time, to benefit from batch extraction. const uniqueInputToOutputEntriesMap = uniqueInputToOutputEntries .reduce((map, [inputRomFile, outputRomFile]) => { - map.set(inputRomFile.getFilePath(), [ - ...(map.get(inputRomFile.getFilePath()) ?? []), - [inputRomFile, outputRomFile], - ]); + const key = inputRomFile.getFilePath(); + if (!map.has(key)) { + map.set(key, [[inputRomFile, outputRomFile]]); + } else { + map.get(key)?.push([inputRomFile, outputRomFile]); + } return map; }, new Map()); for (const groupedInputToOutput of uniqueInputToOutputEntriesMap.values()) { @@ -412,10 +423,18 @@ export default class CandidateWriter extends Module { inputRomFile: File, outputRomFile: File, ): Promise { - // Input and output are the exact same, do nothing + // Input and output are the exact same, maybe do nothing if (outputRomFile.equals(inputRomFile)) { - this.progressBar.logDebug(`${dat.getNameShort()}: ${releaseCandidate.getName()}: ${outputRomFile}: input and output file is the same, skipping`); - return; + const wasMoved = this.options.shouldMove() + && await CandidateWriter.MOVE_MUTEX.runExclusiveForKey( + inputRomFile.getFilePath(), + () => CandidateWriter.FILE_PATH_MOVES.get(inputRomFile.getFilePath()), + ) !== undefined; + + if (!wasMoved) { + this.progressBar.logDebug(`${dat.getNameShort()}: ${releaseCandidate.getName()}: ${outputRomFile}: input and output file is the same, skipping`); + return; + } } const outputFilePath = outputRomFile.getFilePath(); @@ -441,9 +460,14 @@ export default class CandidateWriter extends Module { } } + this.progressBar.setSymbol(ProgressBarSymbol.WRITING); let written = false; for (let i = 0; i <= this.options.getWriteRetry(); i += 1) { - written = await this.writeRawFile(dat, releaseCandidate, inputRomFile, outputFilePath); + if (this.options.shouldMove()) { + 
written = await this.moveRawFile(dat, releaseCandidate, inputRomFile, outputFilePath); + } else { + written = await this.copyRawFile(dat, releaseCandidate, inputRomFile, outputFilePath); + } if (written && !this.options.shouldTest()) { // Successfully written, unknown if valid @@ -476,22 +500,65 @@ export default class CandidateWriter extends Module { this.enqueueFileDeletion(inputRomFile); } - private async writeRawFile( + private async moveRawFile( dat: DAT, releaseCandidate: ReleaseCandidate, inputRomFile: File, outputFilePath: string, ): Promise { - this.progressBar.logInfo(`${dat.getNameShort()}: ${releaseCandidate.getName()}: copying file '${inputRomFile.toString()}' (${fsPoly.sizeReadable(inputRomFile.getSize())}) -> '${outputFilePath}'`); + // Lock the input file, we can't handle concurrent moves + return CandidateWriter.MOVE_MUTEX.runExclusiveForKey(inputRomFile.getFilePath(), async () => { + const movedFilePath = CandidateWriter.FILE_PATH_MOVES.get(inputRomFile.getFilePath()); + if (movedFilePath) { + // The file was already moved, we shouldn't move it again + return this.copyRawFile( + dat, + releaseCandidate, + inputRomFile.withFilePath(movedFilePath), + outputFilePath, + ); + } + + if (inputRomFile instanceof ArchiveEntry + || inputRomFile.getFileHeader() !== undefined + || inputRomFile.getPatch() !== undefined + ) { + // The file can't be moved as-is, it needs to get copied + return this.copyRawFile(dat, releaseCandidate, inputRomFile, outputFilePath); + } + + this.progressBar.logInfo(`${dat.getNameShort()}: ${releaseCandidate.getName()}: moving file '${inputRomFile.toString()}' (${fsPoly.sizeReadable(inputRomFile.getSize())}) โ†’ '${outputFilePath}'`); + + try { + await CandidateWriter.ensureOutputDirExists(outputFilePath); + + await fsPoly.mv(inputRomFile.getFilePath(), outputFilePath); + CandidateWriter.FILE_PATH_MOVES.set(inputRomFile.getFilePath(), outputFilePath); + return true; + } catch (error) { + 
this.progressBar.logError(`${dat.getNameShort()}: ${releaseCandidate.getName()}: failed to move file '${inputRomFile.toString()}' โ†’ '${outputFilePath}': ${error}`); + return false; + } + }); + } + + private async copyRawFile( + dat: DAT, + releaseCandidate: ReleaseCandidate, + inputRomFile: File, + outputFilePath: string, + ): Promise { + this.progressBar.logInfo(`${dat.getNameShort()}: ${releaseCandidate.getName()}: ${inputRomFile instanceof ArchiveEntry ? 'extracting' : 'copying'} file '${inputRomFile.toString()}' (${fsPoly.sizeReadable(inputRomFile.getSize())}) โ†’ '${outputFilePath}'`); try { await CandidateWriter.ensureOutputDirExists(outputFilePath); + const tempRawFile = await fsPoly.mktemp(outputFilePath); await inputRomFile.extractAndPatchToFile(tempRawFile); await fsPoly.mv(tempRawFile, outputFilePath); return true; } catch (error) { - this.progressBar.logError(`${dat.getNameShort()}: ${releaseCandidate.getName()}: ${outputFilePath}: failed to copy from ${inputRomFile.toString()}: ${error}`); + this.progressBar.logError(`${dat.getNameShort()}: ${releaseCandidate.getName()}: failed to ${inputRomFile instanceof ArchiveEntry ? 
'extract' : 'copy'} file '${inputRomFile.toString()}' โ†’ '${outputFilePath}': ${error}`); return false; } } @@ -505,14 +572,15 @@ export default class CandidateWriter extends Module { this.progressBar.logTrace(`${dat.getNameShort()}: ${releaseCandidate.getName()}: ${outputFilePath}: testing raw file`); // Check checksum - if (expectedFile.getCrc32() === '00000000') { - this.progressBar.logWarn(`${dat.getNameShort()}: ${releaseCandidate.getName()}: ${outputFilePath}: can't test, expected CRC is unknown`); - return undefined; + let actualFile: File; + try { + actualFile = await File.fileOf( + { filePath: outputFilePath }, + expectedFile.getChecksumBitmask(), + ); + } catch (error) { + return `failed to parse: ${error}`; } - const actualFile = await File.fileOf( - { filePath: outputFilePath }, - expectedFile.getChecksumBitmask(), - ); if (actualFile.getSha256() && expectedFile.getSha256() && actualFile.getSha256() !== expectedFile.getSha256() @@ -533,6 +601,7 @@ export default class CandidateWriter extends Module { } if (actualFile.getCrc32() && expectedFile.getCrc32() + && expectedFile.getCrc32() !== '00000000' && actualFile.getCrc32() !== expectedFile.getCrc32() ) { return `has the CRC32 ${actualFile.getCrc32()}, expected ${expectedFile.getCrc32()}`; @@ -626,6 +695,7 @@ export default class CandidateWriter extends Module { await fsPoly.rm(linkPath, { force: true }); } + this.progressBar.setSymbol(ProgressBarSymbol.WRITING); for (let i = 0; i <= this.options.getWriteRetry(); i += 1) { const written = await this.writeRawLink(dat, releaseCandidate, targetPath, linkPath); @@ -667,10 +737,10 @@ export default class CandidateWriter extends Module { try { await CandidateWriter.ensureOutputDirExists(linkPath); if (this.options.getSymlink()) { - this.progressBar.logInfo(`${dat.getNameShort()}: ${releaseCandidate.getName()}: creating symlink '${targetPath}' -> '${linkPath}'`); + this.progressBar.logInfo(`${dat.getNameShort()}: ${releaseCandidate.getName()}: creating symlink 
'${targetPath}' โ†’ '${linkPath}'`); await fsPoly.symlink(targetPath, linkPath); } else { - this.progressBar.logInfo(`${dat.getNameShort()}: ${releaseCandidate.getName()}: creating hard link '${targetPath}' -> '${linkPath}'`); + this.progressBar.logInfo(`${dat.getNameShort()}: ${releaseCandidate.getName()}: creating hard link '${targetPath}' โ†’ '${linkPath}'`); await fsPoly.hardlink(targetPath, linkPath); } return true; diff --git a/src/modules/datFilter.ts b/src/modules/datFilter.ts index 28cc7dfbd..22bf06c8c 100644 --- a/src/modules/datFilter.ts +++ b/src/modules/datFilter.ts @@ -20,7 +20,7 @@ export default class DATFilter extends Module { /** * Create a new DAT after filtering. */ - async filter(dat: DAT): Promise { + filter(dat: DAT): DAT { // Return early if there aren't any games if (dat.getGames().length === 0) { this.progressBar.logTrace(`${dat.getNameShort()}: no games to filter`); @@ -28,8 +28,8 @@ export default class DATFilter extends Module { } this.progressBar.logTrace(`${dat.getNameShort()}: filtering DAT`); - await this.progressBar.setSymbol(ProgressBarSymbol.FILTERING); - await this.progressBar.reset(dat.getGames().length); + this.progressBar.setSymbol(ProgressBarSymbol.CANDIDATE_FILTERING); + this.progressBar.reset(dat.getGames().length); const filteredGames = dat.getParents().flatMap((parent) => { const games = parent.getGames().filter((game) => this.filterGame(game)); diff --git a/src/modules/datGameInferrer.ts b/src/modules/datGameInferrer.ts index 849df88a2..d015fac78 100644 --- a/src/modules/datGameInferrer.ts +++ b/src/modules/datGameInferrer.ts @@ -1,5 +1,8 @@ +import fs from 'node:fs'; import path from 'node:path'; +import util from 'node:util'; +import { parse } from '@gplane/cue'; import moment from 'moment'; import ProgressBar from '../console/progressBar.js'; @@ -34,7 +37,7 @@ export default class DATGameInferrer extends Module { /** * Infer {@link Game}s from input files. 
*/ - infer(romFiles: File[]): DAT[] { + async infer(romFiles: File[]): Promise { this.progressBar.logTrace(`inferring DATs for ${romFiles.length.toLocaleString()} ROM${romFiles.length !== 1 ? 's' : ''}`); const normalizedInputPaths = this.options.getInputPaths() @@ -50,21 +53,24 @@ export default class DATGameInferrer extends Module { .filter((inputPath) => normalizedPath.startsWith(inputPath)); (matchedInputPaths.length > 0 ? matchedInputPaths : [DATGameInferrer.DEFAULT_DAT_NAME]) .forEach((inputPath) => { - const datRomFiles = [...(map.get(inputPath) ?? []), file]; - map.set(inputPath, datRomFiles); + if (!map.has(inputPath)) { + map.set(inputPath, [file]); + } else { + map.get(inputPath)?.push(file); + } }); return map; }, new Map()); this.progressBar.logTrace(`inferred ${inputPathsToRomFiles.size.toLocaleString()} DAT${inputPathsToRomFiles.size !== 1 ? 's' : ''}`); - const dats = [...inputPathsToRomFiles.entries()] - .map(([inputPath, datRomFiles]) => DATGameInferrer.createDAT(inputPath, datRomFiles)); + const dats = await Promise.all([...inputPathsToRomFiles.entries()] + .map(async ([inputPath, datRomFiles]) => this.createDAT(inputPath, datRomFiles))); this.progressBar.logTrace('done inferring DATs'); return dats; } - private static createDAT(inputPath: string, romFiles: File[]): DAT { + private async createDAT(inputPath: string, romFiles: File[]): Promise { const datName = path.basename(inputPath); const date = moment().format('YYYYMMDD-HHmmss'); const header = new Header({ @@ -80,50 +86,47 @@ export default class DATGameInferrer extends Module { ].join('\n'), }); - // For all non-archived files, group files of the same filename without extension together - const gameNamesToRawFiles = romFiles - .filter((file) => !(file instanceof ArchiveEntry)) - .reduce((map, file) => { - const gameName = DATGameInferrer.getGameName(file); - map.set(gameName, [...(map.get(gameName) ?? 
[]), file]); - return map; - }, new Map()); + let remainingRomFiles = romFiles; + let gameNamesToRomFiles: [string, File[]][] = []; - // For archives, assume the entire archive is one game - const archivePathsToArchiveEntries = romFiles - .filter((file) => file instanceof ArchiveEntry) - .reduce((map, file) => { - const archivePath = file.getFilePath(); - map.set(archivePath, [...(map.get(archivePath) ?? []), file]); - return map; - }, new Map[]>()); - const gameNamesToArchiveEntries = [...archivePathsToArchiveEntries.values()] - .map((archiveEntries) => { - const gameName = DATGameInferrer.getGameName(archiveEntries[0]); - return [gameName, archiveEntries] satisfies [string, ArchiveEntry[]]; - }); + // For each inference strategy + const inferFunctions = [ + this.inferArchiveEntries, + this.inferBinCueFiles, + this.inferGdiFiles, + this.inferRawFiles, + ]; + for (const inferFunction of inferFunctions) { + // Infer the games and their files) + const result = await inferFunction.bind(this)(remainingRomFiles); - const games = [ - ...gameNamesToRawFiles.entries(), - ...gameNamesToArchiveEntries, - ] - .map(([gameName, gameRomFiles]) => { - const roms = gameRomFiles - .map((romFile) => new ROM({ - name: path.basename(romFile.getExtractedFilePath()), - size: romFile.getSize(), - crc32: romFile.getCrc32(), - md5: romFile.getMd5(), - sha1: romFile.getSha1(), - sha256: romFile.getSha256(), - })) - .filter(ArrayPoly.filterUniqueMapped((rom) => rom.getName())); - return new Game({ - name: gameName, - description: gameName, - rom: roms, - }); - }) + // Update the list of results + gameNamesToRomFiles = [...gameNamesToRomFiles, ...result]; + + // Remove the consumed files from further inference + const consumedFiles = new Set(result + .flatMap(([, resultFiles]) => resultFiles) + .map((file) => file.toString())); + remainingRomFiles = remainingRomFiles.filter((file) => !consumedFiles.has(file.toString())); + } + + const games = gameNamesToRomFiles.map(([gameName, gameRomFiles]) 
=> { + const roms = gameRomFiles + .map((romFile) => new ROM({ + name: path.basename(romFile.getExtractedFilePath()), + size: romFile.getSize(), + crc32: romFile.getCrc32(), + md5: romFile.getMd5(), + sha1: romFile.getSha1(), + sha256: romFile.getSha256(), + })) + .filter(ArrayPoly.filterUniqueMapped((rom) => rom.getName())); + return new Game({ + name: gameName, + description: gameName, + rom: roms, + }); + }) // Filter out duplicate games .filter(ArrayPoly.filterUniqueMapped((game) => game.hashCode())); @@ -143,4 +146,139 @@ export default class DATGameInferrer extends Module { .replace(/(\.[a-z0-9]+)+$/, '') .trim(); } + + private inferArchiveEntries(romFiles: File[]): [string, ArchiveEntry[]][] { + this.progressBar.logTrace(`inferring games from archives from ${romFiles.length.toLocaleString()} file${romFiles.length !== 1 ? 's' : ''}`); + + // For archives, assume the entire archive is one game + const archivePathsToArchiveEntries = romFiles + .filter((file) => file instanceof ArchiveEntry) + .reduce((map, file) => { + const archivePath = file.getFilePath(); + if (!map.has(archivePath)) { + map.set(archivePath, [file]); + } else { + map.get(archivePath)?.push(file); + } + return map; + }, new Map[]>()); + + const results = [...archivePathsToArchiveEntries.values()] + .map((archiveEntries) => { + const gameName = DATGameInferrer.getGameName(archiveEntries[0]); + return [gameName, archiveEntries] satisfies [string, ArchiveEntry[]]; + }); + + this.progressBar.logTrace(`inferred ${results.length.toLocaleString()} games from archives`); + return results; + } + + private async inferBinCueFiles(romFiles: File[]): Promise<[string, File[]][]> { + const rawFiles = romFiles.filter((file) => !(file instanceof ArchiveEntry)); + this.progressBar.logTrace(`inferring games from cue files from ${rawFiles.length.toLocaleString()} non-archive${rawFiles.length !== 1 ? 
's' : ''}`); + + const rawFilePathsToFiles = rawFiles + .reduce((map, file) => { + map.set(file.getFilePath(), file); + return map; + }, new Map()); + + const results = (await Promise.all(rawFiles + .filter((file) => file.getExtractedFilePath().toLowerCase().endsWith('.cue')) + .map(async (cueFile): Promise<[string, File[]] | undefined> => { + try { + const cueData = await util.promisify(fs.readFile)(cueFile.getFilePath()); + + const cueSheet = parse(cueData.toString(), { + fatal: true, + }).sheet; + + const binFiles = cueSheet.files + .map((binFile) => path.join(path.dirname(cueFile.getFilePath()), binFile.name)) + .map((binFilePath) => rawFilePathsToFiles.get(binFilePath)) + .filter((file) => file !== undefined); + if (binFiles.length === 0) { + return undefined; + } + + const gameName = DATGameInferrer.getGameName(cueFile); + return [gameName, [cueFile, ...binFiles]]; + } catch { + return undefined; + } + }))) + .filter((result) => result !== undefined); + + this.progressBar.logTrace(`inferred ${results.length.toLocaleString()} games from cue files`); + return results; + } + + private async inferGdiFiles(romFiles: File[]): Promise<[string, File[]][]> { + const rawFiles = romFiles.filter((file) => !(file instanceof ArchiveEntry)); + this.progressBar.logTrace(`inferring games from gdi files from ${rawFiles.length.toLocaleString()} non-archive${rawFiles.length !== 1 ? 
's' : ''}`); + + const rawFilePathsToFiles = rawFiles + .reduce((map, file) => { + map.set(file.getFilePath(), file); + return map; + }, new Map()); + + const results = (await Promise.all(rawFiles + .filter((file) => file.getExtractedFilePath().toLowerCase().endsWith('.gdi')) + .map(async (gdiFile): Promise<[string, File[]] | undefined> => { + try { + const cueData = await util.promisify(fs.readFile)(gdiFile.getFilePath()); + + const { name: filePrefix } = path.parse(gdiFile.getFilePath()); + const gdiContents = `${cueData.toString() + .split(/\r?\n/) + .filter((line) => line) + // Replace the chdman-generated track files with TOSEC-style track filenames + .map((line) => line + .replace(filePrefix, 'track') + .replace(/"/g, '')) + .join('\r\n')}\r\n`; + + const trackFilePaths = gdiContents.trim() + .split(/\r?\n/) + .slice(1) + .map((line) => line.split(' ')[4]); + const trackFiles = trackFilePaths + .map((trackFilePath) => path.join(path.dirname(gdiFile.getFilePath()), trackFilePath)) + .map((trackFilePath) => rawFilePathsToFiles.get(trackFilePath)) + .filter((file) => file !== undefined); + if (trackFiles.length === 0) { + return undefined; + } + + const gameName = DATGameInferrer.getGameName(gdiFile); + return [gameName, [gdiFile, ...trackFiles]]; + } catch { + return undefined; + } + }))) + .filter((result) => result !== undefined); + + this.progressBar.logTrace(`inferred ${results.length.toLocaleString()} games from gdi files`); + return results; + } + + private inferRawFiles(romFiles: File[]): [string, File[]][] { + this.progressBar.logTrace(`inferring games from raw files from ${romFiles.length.toLocaleString()} file${romFiles.length !== 1 ? 
's' : ''}`); + + const results = romFiles + .filter((file) => !(file instanceof ArchiveEntry)) + .reduce((map, file) => { + const gameName = DATGameInferrer.getGameName(file); + if (!map.has(gameName)) { + map.set(gameName, [file]); + } else { + map.get(gameName)?.push(file); + } + return map; + }, new Map()); + + this.progressBar.logTrace(`inferred ${results.size.toLocaleString()} games from raw files`); + return [...results.entries()]; + } } diff --git a/src/modules/datMergerSplitter.ts b/src/modules/datMergerSplitter.ts index bec1f68ae..69e90e52f 100644 --- a/src/modules/datMergerSplitter.ts +++ b/src/modules/datMergerSplitter.ts @@ -23,7 +23,7 @@ export default class DATMergerSplitter extends Module { /** * Un-merge, split, or merge the {@link Game}s within a {@link DAT}. */ - async merge(dat: DAT): Promise { + merge(dat: DAT): DAT { // Don't do anything if no type provided if (this.options.getMergeRoms() === undefined) { this.progressBar.logTrace(`${dat.getNameShort()}: no ROM merge option provided, doing nothing`); @@ -42,8 +42,8 @@ export default class DATMergerSplitter extends Module { }, new Map()); this.progressBar.logTrace(`${dat.getNameShort()}: merging & splitting ${dat.getGames().length.toLocaleString()} game${dat.getGames().length !== 1 ? 's' : ''}`); - await this.progressBar.setSymbol(ProgressBarSymbol.MERGE_SPLIT); - await this.progressBar.reset(dat.getGames().length); + this.progressBar.setSymbol(ProgressBarSymbol.DAT_MERGE_SPLIT); + this.progressBar.reset(dat.getGames().length); const newGames = dat.getParents() .flatMap((parent) => this.mergeParent(dat, parent, gameNamesToGames)); @@ -65,6 +65,9 @@ export default class DATMergerSplitter extends Module { // Get rid of duplicate ROMs. MAME will sometimes duplicate a file with the exact same // name, size, and checksum but with a different "region" (e.g. neogeo). 
.filter(ArrayPoly.filterUniqueMapped((rom) => rom.getName())), + disk: game.getDisks() + // Get rid of ROMs that haven't been dumped yet + .filter((disk) => disk.getStatus() !== 'nodump'), })); // 'full' types expect device ROMs to be included @@ -81,7 +84,7 @@ export default class DATMergerSplitter extends Module { .reduce(ArrayPoly.reduceUnique(), []) // Get ROMs from the DeviceRef .map((deviceRefName) => gameNamesToGames.get(deviceRefName)) - .filter(ArrayPoly.filterNotNullish) + .filter((deviceGame) => deviceGame !== undefined) .flatMap((deviceGame) => deviceGame.getRoms() .filter((rom) => rom.getStatus() !== 'nodump')), ...game.getRoms(), @@ -115,33 +118,33 @@ export default class DATMergerSplitter extends Module { } return game.withProps({ - rom: DATMergerSplitter.diffGameRoms(biosGame, game), + rom: DATMergerSplitter.diffGameRoms(biosGame.getRoms(), game.getRoms()), }); }); } - // 'split' and 'merged' types should exclude ROMs found in their parent + // 'split' and 'merged' types should exclude ROMs & disks found in their parent if (this.options.getMergeRoms() === MergeMode.SPLIT || this.options.getMergeRoms() === MergeMode.MERGED ) { - games = games - .map((game) => { - if (!game.getParent()) { - // This game doesn't have a parent - return game; - } + games = games.map((game) => { + if (!game.getParent()) { + // This game doesn't have a parent + return game; + } - const parentGame = gameNamesToGames.get(game.getParent()); - if (!parentGame) { - // Invalid cloneOf attribute, parent not found - this.progressBar.logTrace(`${dat.getNameShort()}: ${game.getName()} references an invalid parent: ${game.getParent()}`); - return game; - } + const parentGame = gameNamesToGames.get(game.getParent()); + if (!parentGame) { + // Invalid cloneOf attribute, parent not found + this.progressBar.logTrace(`${dat.getNameShort()}: ${game.getName()} references an invalid parent: ${game.getParent()}`); + return game; + } - return game.withProps({ - rom: 
DATMergerSplitter.diffGameRoms(parentGame, game), - }); + return game.withProps({ + rom: DATMergerSplitter.diffGameRoms(parentGame.getRoms(), game.getRoms()), + disk: DATMergerSplitter.diffGameRoms(parentGame.getDisks(), game.getDisks()), }); + }); } const parentGame = games.find((game) => game.isParent()); @@ -173,13 +176,13 @@ export default class DATMergerSplitter extends Module { })]; } - private static diffGameRoms(parent: Game, child: Game): ROM[] { - const parentRomNamesToHashCodes = parent.getRoms().reduce((map, rom) => { + private static diffGameRoms(parentRoms: ROM[], childRoms: ROM[]): ROM[] { + const parentRomNamesToHashCodes = parentRoms.reduce((map, rom) => { map.set(rom.getName(), rom.hashCode()); return map; }, new Map()); - return child.getRoms().filter((rom) => { + return childRoms.filter((rom) => { const parentName = rom.getMerge() ?? rom.getName(); const parentHashCode = parentRomNamesToHashCodes.get(parentName); if (!parentHashCode) { diff --git a/src/modules/datParentInferrer.ts b/src/modules/datParentInferrer.ts index 21adbb86d..5cdcac2f3 100644 --- a/src/modules/datParentInferrer.ts +++ b/src/modules/datParentInferrer.ts @@ -20,7 +20,7 @@ export default class DATParentInferrer extends Module { /** * Infer {@link Parent}s from {@link Game}s. */ - async infer(dat: DAT): Promise { + infer(dat: DAT): DAT { if (dat.hasParentCloneInfo() && !this.options.getDatIgnoreParentClone()) { this.progressBar.logTrace(`${dat.getNameShort()}: DAT has parent/clone info, skipping`); return dat; @@ -32,15 +32,19 @@ export default class DATParentInferrer extends Module { } this.progressBar.logTrace(`${dat.getNameShort()}: inferring parents for ${dat.getGames().length.toLocaleString()} game${dat.getGames().length !== 1 ? 
's' : ''}`); - await this.progressBar.setSymbol(ProgressBarSymbol.GROUPING_SIMILAR); - await this.progressBar.reset(dat.getGames().length); + this.progressBar.setSymbol(ProgressBarSymbol.DAT_GROUPING_SIMILAR); + this.progressBar.reset(dat.getGames().length); // Group games by their stripped names const strippedNamesToGames = dat.getGames().reduce((map, game) => { let strippedGameName = game.getName(); strippedGameName = DATParentInferrer.stripGameRegionAndLanguage(strippedGameName); strippedGameName = DATParentInferrer.stripGameVariants(strippedGameName); - map.set(strippedGameName, [...(map.get(strippedGameName) ?? []), game]); + if (!map.has(strippedGameName)) { + map.set(strippedGameName, [game]); + } else { + map.get(strippedGameName)?.push(game); + } return map; }, new Map()); const groupedGames = [...strippedNamesToGames.entries()] @@ -60,6 +64,7 @@ export default class DATParentInferrer extends Module { // ***** Regions ***** .replace(new RegExp(`\\(((${Internationalization.REGION_CODES.join('|')})[,+-]? ?)+\\)`, 'i'), '') .replace(new RegExp(`\\(((${Internationalization.REGION_NAMES.join('|')})[,+-]? ?)+\\)`, 'i'), '') + .replace(/\(Latin America\)/i, '') // ***** Languages ***** .replace(new RegExp(`\\(((${Internationalization.LANGUAGES.join('|')})[,+-]? ?)+\\)`, 'i'), '') // ***** Cleanup ***** @@ -73,6 +78,7 @@ export default class DATParentInferrer extends Module { .replace(/\(Alt( [a-z0-9. 
]*)?\)/i, '') .replace(/\([^)]*Collector's Edition\)/i, '') .replace(/\(Extra Box\)/i, '') + .replace(/ - European Version/i, '') .replace(/\(Fukkokuban\)/i, '') // "reprint" .replace(/\([^)]*Genteiban\)/i, '') // "limited edition" .replace(/\(Limited[^)]+Edition\)/i, '') @@ -152,8 +158,8 @@ export default class DATParentInferrer extends Module { // Nintendo - Super Nintendo Entertainment System .replace(/\(NP\)/i, '') // "Nintendo Power" // Sega - Dreamcast - .replace(/\[[0-9]+S\]/, '') // boxcode - .replace(/\[[0-9]+MM?[0-9]+(, [0-9]+MM?[0-9]+)*\]/, '') + .replace(/\[([0-9A-Z ]+(, )?)+\]$/, '') // TOSEC boxcode + .replace(/\[(compilation|data identical to retail|fixed version|keyboard|limited edition|req\. microphone|scrambled|unscrambled|white label)\]/ig, '') // TOSEC .replace(/for Dreamcast/i, '') // Sega - Mega Drive / Genesis .replace(/\(MP\)/i, '') // "MegaPlay version" diff --git a/src/modules/datScanner.ts b/src/modules/datScanner.ts index 1565502c3..506281699 100644 --- a/src/modules/datScanner.ts +++ b/src/modules/datScanner.ts @@ -5,12 +5,17 @@ import { parse } from '@fast-csv/parse'; import ProgressBar, { ProgressBarSymbol } from '../console/progressBar.js'; import DriveSemaphore from '../driveSemaphore.js'; -import ArrayPoly from '../polyfill/arrayPoly.js'; import bufferPoly from '../polyfill/bufferPoly.js'; import fsPoly from '../polyfill/fsPoly.js'; -import CMProParser, { DATProps, GameProps, ROMProps } from '../types/dats/cmpro/cmProParser.js'; +import CMProParser, { + DATProps, + DiskProps, + GameProps, + ROMProps, +} from '../types/dats/cmpro/cmProParser.js'; import DAT from '../types/dats/dat.js'; import DATObject, { DATObjectProps } from '../types/dats/datObject.js'; +import Disk from '../types/dats/disk.js'; import Game from '../types/dats/game.js'; import Header from '../types/dats/logiqx/header.js'; import LogiqxDAT from '../types/dats/logiqx/logiqxDat.js'; @@ -19,8 +24,10 @@ import ROM from '../types/dats/rom.js'; import SoftwareListDAT 
from '../types/dats/softwarelist/softwareListDat.js'; import SoftwareListsDAT from '../types/dats/softwarelist/softwareListsDat.js'; import ExpectedError from '../types/expectedError.js'; +import ArchiveEntry from '../types/files/archives/archiveEntry.js'; import File from '../types/files/file.js'; import { ChecksumBitmask } from '../types/files/fileChecksums.js'; +import FileFactory from '../types/files/fileFactory.js'; import Options from '../types/options.js'; import Scanner from './scanner.js'; @@ -38,8 +45,8 @@ type SmdbRow = { * representation. */ export default class DATScanner extends Scanner { - constructor(options: Options, progressBar: ProgressBar) { - super(options, progressBar, DATScanner.name); + constructor(options: Options, progressBar: ProgressBar, fileFactory: FileFactory) { + super(options, progressBar, fileFactory, DATScanner.name); } /** @@ -47,17 +54,17 @@ export default class DATScanner extends Scanner { */ async scan(): Promise { this.progressBar.logTrace('scanning DAT files'); - await this.progressBar.setSymbol(ProgressBarSymbol.SEARCHING); - await this.progressBar.reset(0); + this.progressBar.setSymbol(ProgressBarSymbol.FILE_SCANNING); + this.progressBar.reset(0); - const datFilePaths = await this.options.scanDatFilesWithoutExclusions(async (increment) => { - await this.progressBar.incrementTotal(increment); + const datFilePaths = await this.options.scanDatFilesWithoutExclusions((increment) => { + this.progressBar.incrementTotal(increment); }); if (datFilePaths.length === 0) { return []; } this.progressBar.logTrace(`found ${datFilePaths.length.toLocaleString()} DAT file${datFilePaths.length !== 1 ? 
's' : ''}`); - await this.progressBar.reset(datFilePaths.length); + this.progressBar.reset(datFilePaths.length); this.progressBar.logTrace('enumerating DAT archives'); const datFiles = await this.getUniqueFilesFromPaths( @@ -65,9 +72,10 @@ export default class DATScanner extends Scanner { this.options.getReaderThreads(), ChecksumBitmask.NONE, ); - await this.progressBar.reset(datFiles.length); + this.progressBar.reset(datFiles.length); const downloadedDats = await this.downloadDats(datFiles); + this.progressBar.reset(downloadedDats.length); const parsedDats = await this.parseDatFiles(downloadedDats); this.progressBar.logTrace('done scanning DAT files'); @@ -80,7 +88,7 @@ export default class DATScanner extends Scanner { } this.progressBar.logTrace('downloading DATs from URLs'); - await this.progressBar.setSymbol(ProgressBarSymbol.DOWNLOADING); + this.progressBar.setSymbol(ProgressBarSymbol.DAT_DOWNLOADING); return (await Promise.all(datFiles.map(async (datFile) => { if (!datFile.isURL()) { @@ -89,6 +97,7 @@ export default class DATScanner extends Scanner { try { this.progressBar.logTrace(`${datFile.toString()}: downloading`); + // TODO(cemmer): these never get deleted? const downloadedDatFile = await datFile.downloadToTempPath('dat'); this.progressBar.logTrace(`${datFile.toString()}: downloaded to '${downloadedDatFile.toString()}'`); return await this.getFilesFromPaths( @@ -105,12 +114,12 @@ export default class DATScanner extends Scanner { // Parse each file into a DAT private async parseDatFiles(datFiles: File[]): Promise { this.progressBar.logTrace(`parsing ${datFiles.length.toLocaleString()} DAT file${datFiles.length !== 1 ? 
's' : ''}`); - await this.progressBar.setSymbol(ProgressBarSymbol.PARSING_CONTENTS); + this.progressBar.setSymbol(ProgressBarSymbol.DAT_PARSING); return (await new DriveSemaphore(this.options.getReaderThreads()).map( datFiles, async (datFile) => { - await this.progressBar.incrementProgress(); + this.progressBar.incrementProgress(); const waitingMessage = `${datFile.toString()} ...`; this.progressBar.addWaitingMessage(waitingMessage); @@ -121,7 +130,7 @@ export default class DATScanner extends Scanner { this.progressBar.logWarn(`${datFile.toString()}: failed to parse DAT file: ${error}`); } - await this.progressBar.incrementDone(); + this.progressBar.incrementDone(); this.progressBar.removeWaitingMessage(waitingMessage); if (dat && this.shouldFilterOut(dat)) { @@ -130,7 +139,7 @@ export default class DATScanner extends Scanner { return dat; }, )) - .filter(ArrayPoly.filterNotNullish) + .filter((dat) => dat !== undefined) .map((dat) => this.sanitizeDat(dat)) .sort((a, b) => a.getNameShort().localeCompare(b.getNameShort())); } @@ -138,7 +147,10 @@ export default class DATScanner extends Scanner { private async parseDatFile(datFile: File): Promise { let dat: DAT | undefined; - if (!dat && await fsPoly.isExecutable(datFile.getFilePath())) { + if (!dat + && !(datFile instanceof ArchiveEntry) + && await fsPoly.isExecutable(datFile.getFilePath()) + ) { dat = await this.parseMameListxml(datFile); } @@ -196,7 +208,7 @@ export default class DATScanner extends Scanner { output += chunk.toString(); }); - proc.on('exit', (code) => { + proc.on('close', (code) => { if (code !== null && code > 0) { reject(new Error(`exit code ${code}`)); return; @@ -333,6 +345,8 @@ export default class DATScanner extends Scanner { } const games = cmproDatGames.flatMap((game) => { + const gameName = game.name ?? 
game.comment; + let gameRoms: ROMProps[] = []; if (game.rom) { if (Array.isArray(game.rom)) { @@ -341,16 +355,29 @@ export default class DATScanner extends Scanner { gameRoms = [game.rom]; } } - const gameName = game.name ?? game.comment; + const roms = gameRoms.map((entry) => new ROM({ + name: entry.name ?? '', + size: Number.parseInt(entry.size ?? '0', 10), + crc32: entry.crc, + md5: entry.md5, + sha1: entry.sha1, + })); - const roms = gameRoms - .map((entry) => new ROM({ - name: entry.name ?? '', - size: Number.parseInt(entry.size ?? '0', 10), - crc32: entry.crc, - md5: entry.md5, - sha1: entry.sha1, - })); + let gameDisks: DiskProps[] = []; + if (game.disk) { + if (Array.isArray(game.disk)) { + gameDisks = game.disk; + } else { + gameDisks = [game.disk]; + } + } + const disks = gameDisks.map((entry) => new Disk({ + name: entry.name ?? '', + size: Number.parseInt(entry.size ?? '0', 10), + crc32: entry.crc, + md5: entry.md5, + sha1: entry.sha1, + })); return new Game({ name: gameName, @@ -365,6 +392,7 @@ export default class DATScanner extends Scanner { genre: game.genre?.toString(), release: undefined, rom: roms, + disk: disks, }); }); @@ -480,7 +508,7 @@ export default class DATScanner extends Scanner { const games = dat.getGames() .map((game) => { const roms = game.getRoms() - // ROMs have to have and at least one non-empty checksum + // Games have to have at least one ROM with a non-empty checksum .filter((rom) => this.options.shouldDir2Dat() || ( (rom.getCrc32() === undefined || rom.getCrc32() !== '00000000') && (rom.getMd5() === undefined || rom.getMd5() !== 'd41d8cd98f00b204e9800998ecf8427e') diff --git a/src/modules/dir2DatCreator.ts b/src/modules/dir2DatCreator.ts index 53a366811..b7b222dbd 100644 --- a/src/modules/dir2DatCreator.ts +++ b/src/modules/dir2DatCreator.ts @@ -34,8 +34,8 @@ export default class Dir2DatCreator extends Module { } this.progressBar.logTrace(`${dat.getNameShort()}: writing dir2dat`); - await 
this.progressBar.setSymbol(ProgressBarSymbol.WRITING); - await this.progressBar.reset(1); + this.progressBar.setSymbol(ProgressBarSymbol.WRITING); + this.progressBar.reset(1); const datDir = this.options.shouldWrite() ? OutputFactory.getDir(this.options, dat) @@ -52,7 +52,11 @@ export default class Dir2DatCreator extends Module { .flat() .reduce((map, releaseCandidate) => { const key = releaseCandidate.getGame(); - map.set(key, [...(map.get(key) ?? []), releaseCandidate]); + if (!map.has(key)) { + map.set(key, [releaseCandidate]); + } else { + map.get(key)?.push(releaseCandidate); + } return map; }, new Map()); const gamesFromCandidates = [...gamesToCandidates.entries()] diff --git a/src/modules/directoryCleaner.ts b/src/modules/directoryCleaner.ts index 190cd5b22..af7c31dfa 100644 --- a/src/modules/directoryCleaner.ts +++ b/src/modules/directoryCleaner.ts @@ -35,15 +35,15 @@ export default class DirectoryCleaner extends Module { } this.progressBar.logTrace('cleaning files in output'); - await this.progressBar.setSymbol(ProgressBarSymbol.SEARCHING); - await this.progressBar.reset(0); + this.progressBar.setSymbol(ProgressBarSymbol.FILE_SCANNING); + this.progressBar.reset(0); // If there is nothing to clean, then don't do anything const filesToClean = await this.options.scanOutputFilesWithoutCleanExclusions( dirsToClean, filesToExclude, - async (increment) => { - await this.progressBar.incrementTotal(increment); + (increment) => { + this.progressBar.incrementTotal(increment); }, ); if (filesToClean.length === 0) { @@ -51,11 +51,11 @@ export default class DirectoryCleaner extends Module { return []; } - await this.progressBar.setSymbol(ProgressBarSymbol.RECYCLING); + this.progressBar.setSymbol(ProgressBarSymbol.RECYCLING); try { this.progressBar.logTrace(`cleaning ${filesToClean.length.toLocaleString()} file${filesToClean.length !== 1 ? 
's' : ''}`); - await this.progressBar.reset(filesToClean.length); + this.progressBar.reset(filesToClean.length); if (this.options.getCleanDryRun()) { this.progressBar.logInfo(`paths skipped from cleaning (dry run):\n${filesToClean.map((filePath) => ` ${filePath}`).join('\n')}`); } else { @@ -74,7 +74,7 @@ export default class DirectoryCleaner extends Module { try { let emptyDirs = await DirectoryCleaner.getEmptyDirs(dirsToClean); while (emptyDirs.length > 0) { - await this.progressBar.reset(emptyDirs.length); + this.progressBar.reset(emptyDirs.length); this.progressBar.logTrace(`cleaning ${emptyDirs.length.toLocaleString()} empty director${emptyDirs.length !== 1 ? 'ies' : 'y'}`); if (this.options.getCleanDryRun()) { this.progressBar.logInfo(`paths skipped from cleaning (dry run):\n${emptyDirs.map((filePath) => ` ${filePath}`).join('\n')}`); @@ -102,7 +102,7 @@ export default class DirectoryCleaner extends Module { } catch (error) { this.progressBar.logWarn(`failed to recycle ${filePathsChunk.length} path${filePathsChunk.length !== 1 ? 's' : ''}: ${error}`); } - await this.progressBar.update(i); + this.progressBar.update(i); } // ...but if that doesn't work, delete the leftovers @@ -110,15 +110,19 @@ export default class DirectoryCleaner extends Module { const existingFilePathsCheck = await Promise.all(filePaths .map(async (filePath) => existSemaphore.runExclusive(async () => fsPoly.exists(filePath)))); const existingFilePaths = filePaths.filter((filePath, idx) => existingFilePathsCheck.at(idx)); + if (existingFilePaths.length > 0) { + this.progressBar.setSymbol(ProgressBarSymbol.DELETING); + } for (let i = 0; i < existingFilePaths.length; i += Defaults.OUTPUT_CLEANER_BATCH_SIZE) { const filePathsChunk = existingFilePaths.slice(i, i + Defaults.OUTPUT_CLEANER_BATCH_SIZE); this.progressBar.logInfo(`deleting cleaned path${filePathsChunk.length !== 1 ? 
's' : ''}:\n${filePathsChunk.map((filePath) => ` ${filePath}`).join('\n')}`); - try { - await Promise.all(filePathsChunk - .map(async (filePath) => fsPoly.rm(filePath, { force: true }))); - } catch (error) { - this.progressBar.logWarn(`failed to delete ${filePathsChunk.length} path${filePathsChunk.length !== 1 ? 's' : ''}: ${error}`); - } + await Promise.all(filePathsChunk.map(async (filePath) => { + try { + await fsPoly.rm(filePath, { force: true }); + } catch (error) { + this.progressBar.logError(`${filePath}: failed to delete: ${error}`); + } + })); } } @@ -144,7 +148,7 @@ export default class DirectoryCleaner extends Module { } catch (error) { this.progressBar.logWarn(`failed to move ${filePath} -> ${backupPath}: ${error}`); } - await this.progressBar.incrementProgress(); + this.progressBar.incrementProgress(); }); })); } diff --git a/src/modules/fixdatCreator.ts b/src/modules/fixdatCreator.ts index 0ec411e15..39a731796 100644 --- a/src/modules/fixdatCreator.ts +++ b/src/modules/fixdatCreator.ts @@ -38,8 +38,8 @@ export default class FixdatCreator extends Module { } this.progressBar.logTrace(`${originalDat.getNameShort()}: generating a fixdat`); - await this.progressBar.setSymbol(ProgressBarSymbol.WRITING); - await this.progressBar.reset(1); + this.progressBar.setSymbol(ProgressBarSymbol.WRITING); + this.progressBar.reset(1); // Create an easily searchable index of every ROM that has a ReleaseCandidate const writtenRomHashCodes = new Set([...parentsToCandidates.values()] @@ -60,7 +60,7 @@ export default class FixdatCreator extends Module { try { fixdatDir = this.getDatOutputDirRoot(originalDat); } catch (error) { - this.progressBar.logWarn(`failed to: ${error}`); + this.progressBar.logWarn(`${originalDat.getNameShort()}: failed to get output directory: ${error}`); } } if (!await fsPoly.exists(fixdatDir)) { diff --git a/src/modules/module.ts b/src/modules/module.ts index 6ca10e390..cf57b3074 100644 --- a/src/modules/module.ts +++ b/src/modules/module.ts @@ -7,6 
+7,7 @@ export default abstract class Module { protected readonly progressBar: ProgressBar; protected constructor(progressBar: ProgressBar, loggerPrefix: string) { - this.progressBar = progressBar.withLoggerPrefix(loggerPrefix); + this.progressBar = progressBar; + this.progressBar.setLoggerPrefix(loggerPrefix); } } diff --git a/src/modules/movedRomDeleter.ts b/src/modules/movedRomDeleter.ts index 27fc5fe80..d9ad7c6c1 100644 --- a/src/modules/movedRomDeleter.ts +++ b/src/modules/movedRomDeleter.ts @@ -1,4 +1,7 @@ +import { Semaphore } from 'async-mutex'; + import ProgressBar, { ProgressBarSymbol } from '../console/progressBar.js'; +import Defaults from '../globals/defaults.js'; import ArrayPoly from '../polyfill/arrayPoly.js'; import fsPoly from '../polyfill/fsPoly.js'; import DAT from '../types/dats/dat.js'; @@ -31,8 +34,8 @@ export default class MovedROMDeleter extends Module { } this.progressBar.logTrace('deleting moved ROMs'); - await this.progressBar.setSymbol(ProgressBarSymbol.FILTERING); - await this.progressBar.reset(movedRoms.length); + this.progressBar.setSymbol(ProgressBarSymbol.CANDIDATE_FILTERING); + this.progressBar.reset(movedRoms.length); const fullyConsumedFiles = this.filterOutPartiallyConsumedArchives(movedRoms, inputRoms); @@ -41,21 +44,31 @@ export default class MovedROMDeleter extends Module { datsToWrittenFiles, ); - await this.progressBar.setSymbol(ProgressBarSymbol.DELETING); - await this.progressBar.reset(filePathsToDelete.length); - this.progressBar.logTrace(`deleting ${filePathsToDelete.length.toLocaleString()} moved file${filePathsToDelete.length !== 1 ? 
's' : ''}`); - - await Promise.all(filePathsToDelete.map(async (filePath) => { - this.progressBar.logInfo(`deleting moved file: ${filePath}`); - try { - await fsPoly.rm(filePath, { force: true }); - } catch { - this.progressBar.logError(`${filePath}: failed to delete`); - } - })); + const existSemaphore = new Semaphore(Defaults.OUTPUT_CLEANER_BATCH_SIZE); + const existingFilePathsCheck = await Promise.all(filePathsToDelete + .map(async (filePath) => existSemaphore.runExclusive(async () => fsPoly.exists(filePath)))); + const existingFilePaths = filePathsToDelete + .filter((filePath, idx) => existingFilePathsCheck.at(idx)); + + this.progressBar.setSymbol(ProgressBarSymbol.DELETING); + this.progressBar.reset(existingFilePaths.length); + this.progressBar.logTrace(`deleting ${existingFilePaths.length.toLocaleString()} moved file${existingFilePaths.length !== 1 ? 's' : ''}`); + + const filePathChunks = existingFilePaths + .reduce(ArrayPoly.reduceChunk(Defaults.OUTPUT_CLEANER_BATCH_SIZE), []); + for (const filePathChunk of filePathChunks) { + this.progressBar.logInfo(`deleting moved file${filePathChunk.length !== 1 ? 
's' : ''}:\n${filePathChunk.map((filePath) => ` ${filePath}`).join('\n')}`); + await Promise.all(filePathChunk.map(async (filePath) => { + try { + await fsPoly.rm(filePath, { force: true }); + } catch (error) { + this.progressBar.logError(`${filePath}: failed to delete: ${error}`); + } + })); + } this.progressBar.logTrace('done deleting moved ROMs'); - return filePathsToDelete; + return existingFilePaths; } /** @@ -123,7 +136,7 @@ export default class MovedROMDeleter extends Module { return filePath; }) - .filter(ArrayPoly.filterNotNullish); + .filter((filePath) => filePath !== undefined); } private static groupFilesByFilePath(files: File[]): Map { diff --git a/src/modules/patchScanner.ts b/src/modules/patchScanner.ts index 3c6b4b701..9e1577571 100644 --- a/src/modules/patchScanner.ts +++ b/src/modules/patchScanner.ts @@ -1,8 +1,8 @@ import ProgressBar, { ProgressBarSymbol } from '../console/progressBar.js'; import DriveSemaphore from '../driveSemaphore.js'; -import ArrayPoly from '../polyfill/arrayPoly.js'; import File from '../types/files/file.js'; import { ChecksumBitmask } from '../types/files/fileChecksums.js'; +import FileFactory from '../types/files/fileFactory.js'; import Options from '../types/options.js'; import Patch from '../types/patches/patch.js'; import PatchFactory from '../types/patches/patchFactory.js'; @@ -12,8 +12,8 @@ import Scanner from './scanner.js'; * Scan for {@link Patch}es and parse them into the correct supported type. 
*/ export default class PatchScanner extends Scanner { - constructor(options: Options, progressBar: ProgressBar) { - super(options, progressBar, PatchScanner.name); + constructor(options: Options, progressBar: ProgressBar, fileFactory: FileFactory) { + super(options, progressBar, fileFactory, PatchScanner.name); } /** @@ -21,26 +21,26 @@ export default class PatchScanner extends Scanner { */ async scan(): Promise { this.progressBar.logTrace('scanning patch files'); - await this.progressBar.setSymbol(ProgressBarSymbol.SEARCHING); - await this.progressBar.reset(0); + this.progressBar.setSymbol(ProgressBarSymbol.FILE_SCANNING); + this.progressBar.reset(0); - const patchFilePaths = await this.options.scanPatchFilesWithoutExclusions(async (increment) => { - await this.progressBar.incrementTotal(increment); + const patchFilePaths = await this.options.scanPatchFilesWithoutExclusions((increment) => { + this.progressBar.incrementTotal(increment); }); this.progressBar.logTrace(`found ${patchFilePaths.length.toLocaleString()} patch file${patchFilePaths.length !== 1 ? 
's' : ''}`); - await this.progressBar.reset(patchFilePaths.length); + this.progressBar.reset(patchFilePaths.length); - const files = await this.getUniqueFilesFromPaths( + const patchFiles = await this.getUniqueFilesFromPaths( patchFilePaths, this.options.getReaderThreads(), ChecksumBitmask.NONE, ); - await this.progressBar.reset(files.length); + this.progressBar.reset(patchFiles.length); const patches = (await new DriveSemaphore(this.options.getReaderThreads()).map( - files, + patchFiles, async (file) => { - await this.progressBar.incrementProgress(); + this.progressBar.incrementProgress(); const waitingMessage = `${file.toString()} ...`; this.progressBar.addWaitingMessage(waitingMessage); @@ -50,11 +50,12 @@ export default class PatchScanner extends Scanner { this.progressBar.logWarn(`${file.toString()}: failed to parse patch: ${error}`); return undefined; } finally { - await this.progressBar.incrementDone(); + this.progressBar.incrementDone(); this.progressBar.removeWaitingMessage(waitingMessage); } }, - )).filter(ArrayPoly.filterNotNullish); + )) + .filter((patch) => patch !== undefined); this.progressBar.logTrace('done scanning patch files'); return patches; diff --git a/src/modules/reportGenerator.ts b/src/modules/reportGenerator.ts index 81bf4abbb..08563db2f 100644 --- a/src/modules/reportGenerator.ts +++ b/src/modules/reportGenerator.ts @@ -87,7 +87,7 @@ export default class ReportGenerator extends Module { this.progressBar.logTrace(`wrote ${datStatuses.length.toLocaleString()} CSV row${datStatuses.length !== 1 ? 
's' : ''}: ${reportPath}`); this.progressBar.logTrace('done generating report'); - await this.progressBar.done(reportPath); - await this.progressBar.freeze(); + this.progressBar.done(reportPath); + this.progressBar.freeze(); } } diff --git a/src/modules/romHeaderProcessor.ts b/src/modules/romHeaderProcessor.ts index de9b6fcda..3471a1019 100644 --- a/src/modules/romHeaderProcessor.ts +++ b/src/modules/romHeaderProcessor.ts @@ -2,7 +2,7 @@ import ProgressBar, { ProgressBarSymbol } from '../console/progressBar.js'; import DriveSemaphore from '../driveSemaphore.js'; import ArchiveEntry from '../types/files/archives/archiveEntry.js'; import File from '../types/files/file.js'; -import FileCache from '../types/files/fileCache.js'; +import FileFactory from '../types/files/fileFactory.js'; import ROMHeader from '../types/files/romHeader.js'; import Options from '../types/options.js'; import Module from './module.js'; @@ -14,9 +14,15 @@ import Module from './module.js'; export default class ROMHeaderProcessor extends Module { private readonly options: Options; - constructor(options: Options, progressBar: ProgressBar) { + private readonly fileFactory: FileFactory; + + private readonly driveSemaphore: DriveSemaphore; + + constructor(options: Options, progressBar: ProgressBar, fileFactory: FileFactory) { super(progressBar, ROMHeaderProcessor.name); this.options = options; + this.fileFactory = fileFactory; + this.driveSemaphore = new DriveSemaphore(this.options.getReaderThreads()); } /** @@ -27,14 +33,25 @@ export default class ROMHeaderProcessor extends Module { return inputRomFiles; } - this.progressBar.logTrace('processing file headers'); - await this.progressBar.setSymbol(ProgressBarSymbol.DETECTING_HEADERS); - await this.progressBar.reset(inputRomFiles.length); + const filesThatNeedProcessing = inputRomFiles + .filter((inputFile) => this.fileNeedsProcessing(inputFile)) + .length; + if (filesThatNeedProcessing === 0) { + this.progressBar.logTrace('no ROMs need their header 
processed'); + return inputRomFiles; + } + + this.progressBar.logTrace(`processing headers in ${filesThatNeedProcessing.toLocaleString()} ROM${filesThatNeedProcessing !== 1 ? 's' : ''}`); + this.progressBar.setSymbol(ProgressBarSymbol.ROM_HEADER_DETECTION); + this.progressBar.reset(filesThatNeedProcessing); + + const parsedFiles = await Promise.all(inputRomFiles.map(async (inputFile) => { + if (!this.fileNeedsProcessing(inputFile)) { + return inputFile; + } - const parsedFiles = await new DriveSemaphore(this.options.getReaderThreads()).map( - inputRomFiles, - async (inputFile) => { - await this.progressBar.incrementProgress(); + return this.driveSemaphore.runExclusive(inputFile, async () => { + this.progressBar.incrementProgress(); const waitingMessage = `${inputFile.toString()} ...`; this.progressBar.addWaitingMessage(waitingMessage); @@ -47,11 +64,11 @@ export default class ROMHeaderProcessor extends Module { } this.progressBar.removeWaitingMessage(waitingMessage); - await this.progressBar.incrementDone(); + this.progressBar.incrementDone(); return fileWithHeader; - }, - ); + }); + })); const headeredRomsCount = parsedFiles.filter((romFile) => romFile.getFileHeader()).length; this.progressBar.logTrace(`found headers in ${headeredRomsCount.toLocaleString()} ROM${headeredRomsCount !== 1 ? 's' : ''}`); @@ -60,7 +77,7 @@ export default class ROMHeaderProcessor extends Module { return parsedFiles; } - private async getFileWithHeader(inputFile: File): Promise { + private fileNeedsProcessing(inputFile: File): boolean { /** * If the input file is from an archive, and we're not zipping or extracting, then we have no * chance to remove the header, so we shouldn't bother detecting one. 
@@ -70,23 +87,21 @@ export default class ROMHeaderProcessor extends Module { && !this.options.shouldZip() && !this.options.shouldExtract() ) { - return inputFile; + return false; } - // Should get FileHeader from File, try to - if (ROMHeader.headerFromFilename(inputFile.getExtractedFilePath()) !== undefined - || this.options.shouldReadFileForHeader(inputFile.getExtractedFilePath()) - ) { - this.progressBar.logTrace(`${inputFile.toString()}: reading potentially headered file by file contents`); - const headerForFileStream = await FileCache.getOrComputeFileHeader(inputFile); - if (headerForFileStream) { - this.progressBar.logTrace(`${inputFile.toString()}: found header by file contents: ${headerForFileStream.getHeaderedFileExtension()}`); - return inputFile.withFileHeader(headerForFileStream); - } - this.progressBar.logTrace(`${inputFile.toString()}: didn't find header by file contents`); - } + return ROMHeader.headerFromFilename(inputFile.getExtractedFilePath()) !== undefined + || this.options.shouldReadFileForHeader(inputFile.getExtractedFilePath()); + } - // Should not get FileHeader + private async getFileWithHeader(inputFile: File): Promise { + this.progressBar.logTrace(`${inputFile.toString()}: reading potentially headered file by file contents`); + const headerForFileStream = await this.fileFactory.headerFrom(inputFile); + if (headerForFileStream) { + this.progressBar.logTrace(`${inputFile.toString()}: found header by file contents: ${headerForFileStream.getHeaderedFileExtension()}`); + return inputFile.withFileHeader(headerForFileStream); + } + this.progressBar.logTrace(`${inputFile.toString()}: didn't find header by file contents`); return inputFile; } } diff --git a/src/modules/romIndexer.ts b/src/modules/romIndexer.ts index 7fd488122..ec83d7121 100644 --- a/src/modules/romIndexer.ts +++ b/src/modules/romIndexer.ts @@ -3,8 +3,10 @@ import path from 'node:path'; import ProgressBar, { ProgressBarSymbol } from '../console/progressBar.js'; import FsPoly from 
'../polyfill/fsPoly.js'; import ArchiveEntry from '../types/files/archives/archiveEntry.js'; +import Chd from '../types/files/archives/chd/chd.js'; +import Maxcso from '../types/files/archives/maxcso/maxcso.js'; import Rar from '../types/files/archives/rar.js'; -import SevenZip from '../types/files/archives/sevenZip.js'; +import SevenZip from '../types/files/archives/sevenZip/sevenZip.js'; import Tar from '../types/files/archives/tar.js'; import Zip from '../types/files/archives/zip.js'; import File from '../types/files/file.js'; @@ -27,10 +29,10 @@ export default class ROMIndexer extends Module { /** * Index files. */ - async index(files: File[]): Promise { + index(files: File[]): IndexedFiles { this.progressBar.logTrace(`indexing ${files.length.toLocaleString()} file${files.length !== 1 ? 's' : ''}`); - await this.progressBar.setSymbol(ProgressBarSymbol.INDEXING); - await this.progressBar.reset(files.length); + this.progressBar.setSymbol(ProgressBarSymbol.ROM_INDEXING); + this.progressBar.reset(files.length); // Index the files const result = IndexedFiles.fromFiles(files); @@ -45,12 +47,12 @@ export default class ROMIndexer extends Module { private sortMap(checksumsToFiles: ChecksumsToFiles): void { const outputDir = path.resolve(this.options.getOutputDirRoot()); - const outputDirDisk = FsPoly.disksSync().find((mount) => outputDir.startsWith(mount)); + const outputDirDisk = FsPoly.diskResolved(outputDir); [...checksumsToFiles.values()] .forEach((files) => files .sort((fileOne, fileTwo) => { - // Prefer un-archived files + // Prefer un-archived files because they're less expensive to process const fileOneArchived = ROMIndexer.archiveEntryPriority(fileOne); const fileTwoArchived = ROMIndexer.archiveEntryPriority(fileTwo); if (fileOneArchived !== fileTwoArchived) { @@ -61,6 +63,7 @@ export default class ROMIndexer extends Module { // This is in case the output file is invalid and we're trying to overwrite it with // something else. 
Otherwise, we'll just attempt to overwrite the invalid output file with // itself, still resulting in an invalid output file. + // TODO(cemmer): only do this when overwriting files in some way? const fileOneInOutput = path.resolve(fileOne.getFilePath()).startsWith(outputDir) ? 1 : 0; const fileTwoInOutput = path.resolve(fileTwo.getFilePath()).startsWith(outputDir) ? 1 : 0; if (fileOneInOutput !== fileTwoInOutput) { @@ -69,6 +72,7 @@ export default class ROMIndexer extends Module { // Then, prefer files that are on the same disk for fs efficiency see {@link FsPoly#mv} if (outputDirDisk) { + // TODO(cemmer): only do this when not copying files? const fileOneInOutputDisk = path.resolve(fileOne.getFilePath()) .startsWith(outputDirDisk) ? 0 : 1; const fileTwoInOutputDisk = path.resolve(fileTwo.getFilePath()) @@ -97,6 +101,10 @@ export default class ROMIndexer extends Module { return 3; } if (file.getArchive() instanceof SevenZip) { return 4; + } if (file.getArchive() instanceof Maxcso) { + return 5; + } if (file.getArchive() instanceof Chd) { + return 6; } return 99; } diff --git a/src/modules/romScanner.ts b/src/modules/romScanner.ts index f84f9f46f..9e625a5e6 100644 --- a/src/modules/romScanner.ts +++ b/src/modules/romScanner.ts @@ -1,6 +1,7 @@ import ProgressBar, { ProgressBarSymbol } from '../console/progressBar.js'; import File from '../types/files/file.js'; import { ChecksumBitmask } from '../types/files/fileChecksums.js'; +import FileFactory from '../types/files/fileFactory.js'; import Options from '../types/options.js'; import Scanner from './scanner.js'; @@ -9,8 +10,8 @@ import Scanner from './scanner.js'; * representation. 
*/ export default class ROMScanner extends Scanner { - constructor(options: Options, progressBar: ProgressBar) { - super(options, progressBar, ROMScanner.name); + constructor(options: Options, progressBar: ProgressBar, fileFactory: FileFactory) { + super(options, progressBar, fileFactory, ROMScanner.name); } /** @@ -21,14 +22,15 @@ export default class ROMScanner extends Scanner { checksumArchives = false, ): Promise { this.progressBar.logTrace('scanning ROM files'); - await this.progressBar.setSymbol(ProgressBarSymbol.SEARCHING); - await this.progressBar.reset(0); + this.progressBar.setSymbol(ProgressBarSymbol.FILE_SCANNING); + this.progressBar.reset(0); - const romFilePaths = await this.options.scanInputFilesWithoutExclusions(async (increment) => { - await this.progressBar.incrementTotal(increment); + const romFilePaths = await this.options.scanInputFilesWithoutExclusions((increment) => { + this.progressBar.incrementTotal(increment); }); this.progressBar.logTrace(`found ${romFilePaths.length.toLocaleString()} ROM file${romFilePaths.length !== 1 ? 
's' : ''}`); - await this.progressBar.reset(romFilePaths.length); + this.progressBar.setSymbol(ProgressBarSymbol.ROM_HASHING); + this.progressBar.reset(romFilePaths.length); const files = await this.getFilesFromPaths( romFilePaths, diff --git a/src/modules/scanner.ts b/src/modules/scanner.ts index 191a4a797..03bf7eb2b 100644 --- a/src/modules/scanner.ts +++ b/src/modules/scanner.ts @@ -1,11 +1,15 @@ +import { CHDInfo, CHDType } from 'chdman'; + import ProgressBar from '../console/progressBar.js'; import DriveSemaphore from '../driveSemaphore.js'; -import ElasticSemaphore from '../elasticSemaphore.js'; -import Defaults from '../globals/defaults.js'; import ArrayPoly from '../polyfill/arrayPoly.js'; import fsPoly from '../polyfill/fsPoly.js'; import ArchiveEntry from '../types/files/archives/archiveEntry.js'; +import Chd from '../types/files/archives/chd/chd.js'; +import Gzip from '../types/files/archives/sevenZip/gzip.js'; +import Tar from '../types/files/archives/tar.js'; import File from '../types/files/file.js'; +import { ChecksumBitmask } from '../types/files/fileChecksums.js'; import FileFactory from '../types/files/fileFactory.js'; import Options from '../types/options.js'; import Module from './module.js'; @@ -14,17 +18,19 @@ import Module from './module.js'; * The base class for every input file scanner class. */ export default abstract class Scanner extends Module { - // WARN(cemmer): there is an undocumented semaphore max value that can be used, the full - // 4,700,372,992 bytes of a DVD+R will cause runExclusive() to never run or return. 
- private static readonly FILESIZE_SEMAPHORE = new ElasticSemaphore( - Defaults.MAX_READ_WRITE_CONCURRENT_KILOBYTES, - ); - protected readonly options: Options; - protected constructor(options: Options, progressBar: ProgressBar, loggerPrefix: string) { + private readonly fileFactory: FileFactory; + + protected constructor( + options: Options, + progressBar: ProgressBar, + fileFactory: FileFactory, + loggerPrefix: string, + ) { super(progressBar, loggerPrefix); this.options = options; + this.fileFactory = fileFactory; } protected async getFilesFromPaths( @@ -36,14 +42,15 @@ export default abstract class Scanner extends Module { return (await new DriveSemaphore(threads).map( filePaths, async (inputFile) => { - await this.progressBar.incrementProgress(); + this.progressBar.incrementProgress(); const waitingMessage = `${inputFile} ...`; this.progressBar.addWaitingMessage(waitingMessage); const files = await this.getFilesFromPath(inputFile, checksumBitmask, checksumArchives); + await this.logWarnings(files); this.progressBar.removeWaitingMessage(waitingMessage); - await this.progressBar.incrementDone(); + this.progressBar.incrementDone(); return files; }, )).flat(); @@ -65,36 +72,62 @@ export default abstract class Scanner extends Module { checksumArchives = false, ): Promise { try { - const totalKilobytes = await fsPoly.size(filePath) / 1024; - const files = await Scanner.FILESIZE_SEMAPHORE.runExclusive( - async () => { - if (await fsPoly.isSymlink(filePath)) { - const realFilePath = await fsPoly.readlinkResolved(filePath); - if (!await fsPoly.exists(realFilePath)) { - this.progressBar.logWarn(`${filePath}: broken symlink, '${realFilePath}' doesn't exist`); - return []; - } - } - - const filesFromPath = await FileFactory.filesFrom(filePath, checksumBitmask); - - const fileIsArchive = filesFromPath.some((file) => file instanceof ArchiveEntry); - if (checksumArchives && fileIsArchive) { - filesFromPath.push(await FileFactory.fileFrom(filePath, checksumBitmask)); - } - - 
return filesFromPath; - }, - totalKilobytes, + if (await fsPoly.isSymlink(filePath)) { + const realFilePath = await fsPoly.readlinkResolved(filePath); + if (!await fsPoly.exists(realFilePath)) { + this.progressBar.logWarn(`${filePath}: broken symlink, '${realFilePath}' doesn't exist`); + return []; + } + } + + const filesFromPath = await this.fileFactory.filesFrom( + filePath, + checksumBitmask, + this.options.getInputChecksumQuick() ? ChecksumBitmask.NONE : checksumBitmask, ); - if (files.length === 0) { + const fileIsArchive = filesFromPath.some((file) => file instanceof ArchiveEntry); + if (checksumArchives && fileIsArchive) { + filesFromPath.push(await this.fileFactory.fileFrom(filePath, checksumBitmask)); + } + + if (filesFromPath.length === 0) { this.progressBar.logWarn(`${filePath}: found no files in path`); } - return files; + return filesFromPath; } catch (error) { this.progressBar.logError(`${filePath}: failed to parse file: ${error}`); return []; } } + + private async logWarnings(files: File[]): Promise { + if (this.options.getInputChecksumQuick()) { + const archiveWithoutChecksums = files + .filter((file) => file instanceof ArchiveEntry) + .map((archiveEntry) => archiveEntry.getArchive()) + .find((archive) => archive instanceof Gzip || archive instanceof Tar); + if (archiveWithoutChecksums !== undefined) { + this.progressBar.logWarn(`${archiveWithoutChecksums.getFilePath()}: quick checksums will skip ${archiveWithoutChecksums.getExtension()} files`); + return; + } + + const chdInfos = await Promise.all(files + .filter((file) => file instanceof ArchiveEntry) + .map((archiveEntry) => archiveEntry.getArchive()) + .filter((archive) => archive instanceof Chd) + .map(async (chd) => ([chd, await chd.getInfo()] satisfies [Chd, CHDInfo]))); + + const cdRom = chdInfos.find(([, info]) => info.type === CHDType.CD_ROM); + if (cdRom !== undefined) { + this.progressBar.logWarn(`${cdRom[0].getFilePath()}: quick checksums will skip .cue/.bin files in CD-ROM CHDs`); + 
return; + } + + const gdRom = chdInfos.find(([, info]) => info.type === CHDType.GD_ROM); + if (gdRom !== undefined) { + this.progressBar.logWarn(`${gdRom[0].getFilePath()}: quick checksums will skip .gdi/.bin/.raw files in GD-ROM CHDs`); + } + } + } } diff --git a/src/polyfill/arrayPoly.ts b/src/polyfill/arrayPoly.ts index 063a01647..9ecc430e8 100644 --- a/src/polyfill/arrayPoly.ts +++ b/src/polyfill/arrayPoly.ts @@ -1,22 +1,4 @@ export default class ArrayPoly { - /** - * Filter out nullish values from an array, in a way that TypeScript can understand how the - * resulting element type has changed. For example, TypeScript (as of v5.1.3) will tell you that: - * - * - * [1, 2, undefined, 4, undefined].filter((val) => val); - * - * [1, 2, undefined, 4, undefined].filter(ArrayPoly.filterNotNullish); - * - */ - public static filterNotNullish(value: T | null | undefined): value is T { - return value !== null && value !== undefined; - } - /** * Filter elements in an array to only unique values, using the result of a mapper function to * test for equality. 
Usage: @@ -29,7 +11,12 @@ export default class ArrayPoly { mapper: (arg: T) => V, ): (value: T, idx: number, values: T[]) => boolean { const seenMappedValues = new Set(); - return (value: T): boolean => { + return (value, idx, values): boolean => { + if (values.length <= 1) { + // Arrays that are empty or only have one element are already unique + return true; + } + const mapped = mapper(value); if (!seenMappedValues.has(mapped)) { seenMappedValues.add(mapped); @@ -76,6 +63,11 @@ export default class ArrayPoly { */ public static reduceUnique(): (previous: T[], current: T, idx: number, array: T[]) => T[] { return (previous: T[], current: T, idx: number, array: T[]): T[] => { + if (array.length <= 1) { + // Arrays that are empty or only have one element are already unique + return array; + } + if (idx === 0) { return [...new Set(array)]; } diff --git a/src/polyfill/fsPoly.ts b/src/polyfill/fsPoly.ts index a387e830e..5718c60c5 100644 --- a/src/polyfill/fsPoly.ts +++ b/src/polyfill/fsPoly.ts @@ -8,9 +8,9 @@ import util from 'node:util'; import { isNotJunk } from 'junk'; import nodeDiskInfo from 'node-disk-info'; +import { Memoize } from 'typescript-memoize'; import ExpectedError from '../types/expectedError.js'; -import ArrayPoly from './arrayPoly.js'; export type FsWalkCallback = (increment: number) => void; @@ -18,13 +18,31 @@ export default class FsPoly { static readonly FILE_READING_CHUNK_SIZE = 64 * 1024; // 64KiB, Node.js v22 default // Assume that all drives we're reading from or writing to were already mounted at startup - public static readonly DRIVES = nodeDiskInfo.getDiskInfoSync(); + private static readonly DRIVES = nodeDiskInfo.getDiskInfoSync(); - static async canSymlink(tempDir: string): Promise { - const source = await this.mktemp(path.join(tempDir, 'source')); + static async canHardlink(dirPath: string): Promise { + const source = await this.mktemp(path.join(dirPath, 'source')); try { await this.touch(source); - const target = await 
this.mktemp(path.join(tempDir, 'target')); + const target = await this.mktemp(path.join(dirPath, 'target')); + try { + await this.hardlink(source, target); + return await this.exists(target); + } finally { + await this.rm(target, { force: true }); + } + } catch { + return false; + } finally { + await this.rm(source, { force: true }); + } + } + + static async canSymlink(dirPath: string): Promise { + const source = await this.mktemp(path.join(dirPath, 'source')); + try { + await this.touch(source); + const target = await this.mktemp(path.join(dirPath, 'target')); try { await this.symlink(source, target); return await this.exists(target); @@ -78,10 +96,17 @@ export default class FsPoly { return (await Promise.all( readDir.map(async (filePath) => (await this.isDirectory(filePath) ? filePath : undefined)), - )).filter(ArrayPoly.filterNotNullish); + )) + .filter((childDir) => childDir !== undefined); } - static disksSync(): string[] { + static diskResolved(filePath: string): string | undefined { + const filePathResolved = path.resolve(filePath); + return this.disksSync().find((mountPath) => filePathResolved.startsWith(mountPath)); + } + + @Memoize() + private static disksSync(): string[] { return FsPoly.DRIVES .filter((drive) => drive.available > 0) .map((drive) => drive.mounted) @@ -127,6 +152,19 @@ export default class FsPoly { } } + static isDirectorySync(pathLike: string): boolean { + try { + const lstat = fs.lstatSync(pathLike); + if (lstat.isSymbolicLink()) { + const link = this.readlinkResolvedSync(pathLike); + return this.isDirectorySync(link); + } + return lstat.isDirectory(); + } catch { + return false; + } + } + static async isExecutable(pathLike: PathLike): Promise { try { await fs.promises.access(pathLike, fs.constants.X_OK); @@ -136,6 +174,14 @@ export default class FsPoly { } } + static async isHardlink(pathLike: PathLike): Promise { + try { + return (await this.stat(pathLike)).nlink > 1; + } catch { + return false; + } + } + static isSamba(filePath: 
string): boolean { const normalizedPath = filePath.replace(/[\\/]/g, path.sep); if (normalizedPath.startsWith(`${path.sep}${path.sep}`) && normalizedPath !== os.devNull) { @@ -163,6 +209,14 @@ export default class FsPoly { } } + static isSymlinkSync(pathLike: PathLike): boolean { + try { + return fs.lstatSync(pathLike).isSymbolicLink(); + } catch { + return false; + } + } + static async isWritable(filePath: string): Promise { const exists = await this.exists(filePath); try { @@ -172,7 +226,7 @@ export default class FsPoly { return false; } finally { if (!exists) { - await this.rm(filePath); + await this.rm(filePath, { force: true }); } } } @@ -227,23 +281,6 @@ export default class FsPoly { } static async mv(oldPath: string, newPath: string, attempt = 1): Promise { - /** - * WARN(cemmer): {@link fs.rename} appears to be VERY memory intensive when copying across - * drives! Instead, we'll use stream piping to keep memory usage low. - */ - if (this.onDifferentDrives(oldPath, newPath)) { - const read = fs.createReadStream(oldPath, { - highWaterMark: this.FILE_READING_CHUNK_SIZE, - }); - await new Promise((resolve, reject) => { - const write = fs.createWriteStream(newPath); - write.on('close', resolve); - write.on('error', reject); - read.pipe(write); - }); - return this.rm(oldPath, { force: true }); - } - try { return await fs.promises.rename(oldPath, newPath); } catch (error) { @@ -253,11 +290,11 @@ export default class FsPoly { } // Backoff with jitter - if (attempt >= 3) { + if (attempt >= 5) { throw error; } await new Promise((resolve) => { - setTimeout(resolve, Math.random() * (2 ** (attempt - 1) * 100)); + setTimeout(resolve, Math.random() * (2 ** (attempt - 1) * 10)); }); // Attempt to resolve Windows' "EBUSY: resource busy or locked" @@ -267,14 +304,10 @@ export default class FsPoly { } private static onDifferentDrives(one: string, two: string): boolean { - const oneResolved = path.resolve(one); - const twoResolved = path.resolve(two); - if 
(path.dirname(oneResolved) === path.dirname(twoResolved)) { + if (path.dirname(one) === path.dirname(two)) { return false; } - const driveMounts = this.disksSync(); - return driveMounts.find((mount) => oneResolved.startsWith(mount)) - !== driveMounts.find((mount) => twoResolved.startsWith(mount)); + return this.diskResolved(one) !== this.diskResolved(two); } static async readlink(pathLike: PathLike): Promise { @@ -284,6 +317,13 @@ export default class FsPoly { return fs.promises.readlink(pathLike); } + static readlinkSync(pathLike: PathLike): string { + if (!this.isSymlinkSync(pathLike)) { + throw new ExpectedError(`can't readlink of non-symlink: ${pathLike}`); + } + return fs.readlinkSync(pathLike); + } + static async readlinkResolved(link: string): Promise { const source = await this.readlink(link); if (path.isAbsolute(source)) { @@ -292,6 +332,14 @@ export default class FsPoly { return path.join(path.dirname(link), source); } + static readlinkResolvedSync(link: string): string { + const source = this.readlinkSync(link); + if (path.isAbsolute(source)) { + return source; + } + return path.join(path.dirname(link), source); + } + static async realpath(pathLike: PathLike): Promise { if (!await this.exists(pathLike)) { throw new ExpectedError(`can't get realpath of non-existent path: ${pathLike}`); @@ -311,7 +359,7 @@ export default class FsPoly { if (optionsWithRetry?.force) { return; } - throw new Error(`can't rm, path doesn't exist: ${pathLike}`); + throw new ExpectedError(`can't rm, path doesn't exist: ${pathLike}`); } if (await this.isDirectory(pathLike)) { @@ -324,6 +372,31 @@ export default class FsPoly { } } + static rmSync(pathLike: string, options: RmOptions = {}): void { + const optionsWithRetry = { + maxRetries: 2, + ...options, + }; + + try { + fs.accessSync(pathLike); + } catch { + if (optionsWithRetry?.force) { + return; + } + throw new ExpectedError(`can't rmSync, path doesn't exist: ${pathLike}`); + } + + if (this.isDirectorySync(pathLike)) { + 
fs.rmSync(pathLike, { + ...optionsWithRetry, + recursive: true, + }); + } else { + fs.unlinkSync(pathLike); + } + } + /** * Note: this will follow symlinks and get the size of the target. */ @@ -382,22 +455,6 @@ export default class FsPoly { await file.close(); } - static touchSync(filePath: string): void { - const dirname = path.dirname(filePath); - if (!fs.existsSync(dirname)) { - fs.mkdirSync(dirname, { recursive: true }); - } - - // Create the file if it doesn't already exist - const file = fs.openSync(filePath, 'a'); - - // Ensure the file's `atime` and `mtime` are updated - const date = new Date(); - fs.futimesSync(file, date, date); - - fs.closeSync(file); - } - static async walk(pathLike: PathLike, callback?: FsWalkCallback): Promise { let output: string[] = []; @@ -409,23 +466,30 @@ export default class FsPoly { return []; } - if (callback) { - callback(entries.length); - } - - // TODO(cemmer): `Promise.all()` this? - for (const entry of entries) { + const entryIsDirectory = await Promise.all(entries.map(async (entry) => { const fullPath = path.join(pathLike.toString(), entry.name); - if (entry.isDirectory() || (entry.isSymbolicLink() && await this.isDirectory(fullPath))) { - const subDirFiles = await this.walk(fullPath); - output = [...output, ...subDirFiles]; - if (callback) { - callback(subDirFiles.length - 1); - } - } else { - output = [...output, fullPath]; + return entry.isDirectory() || (entry.isSymbolicLink() && await this.isDirectory(fullPath)); + })); + + // Depth-first search directories first + const directories = entries + .filter((entry, idx) => entryIsDirectory[idx]) + .map((entry) => path.join(pathLike.toString(), entry.name)); + for (const directory of directories) { + const subDirFiles = await this.walk(directory); + if (callback) { + callback(subDirFiles.length); } + output = [...output, ...subDirFiles]; + } + + const files = entries + .filter((entry, idx) => !entryIsDirectory[idx]) + .map((entry) => path.join(pathLike.toString(), 
entry.name)); + if (callback) { + callback(files.length); } + output = [...output, ...files]; return output; } diff --git a/src/polyfill/timePoly.ts b/src/polyfill/timePoly.ts new file mode 100644 index 000000000..2dc414fda --- /dev/null +++ b/src/polyfill/timePoly.ts @@ -0,0 +1,14 @@ +export default { + hrtimeMillis(timeMillis?: number): number { + const hrtime = timeMillis !== undefined + ? [ + // ms -> s + Math.floor(timeMillis / 1000), + // ms -> ns + (timeMillis - Math.floor(timeMillis / 1000) * 1000) * 1_000_000, + ] satisfies [number, number] : undefined; + + const [sec, nano] = process.hrtime(hrtime); + return sec * 1000 + Math.floor(nano / 1_000_000); + }, +}; diff --git a/src/polyfill/urlPoly.ts b/src/polyfill/urlPoly.ts index 1d31d803d..1c9c34086 100644 --- a/src/polyfill/urlPoly.ts +++ b/src/polyfill/urlPoly.ts @@ -4,9 +4,9 @@ export default { */ canParse(input: string, base?: string): boolean { try { - // eslint-disable-next-line no-new - new URL(input, base); - return true; + const url = new URL(input, base); + // Try to detect and ignore Windows drive letters + return process.platform !== 'win32' || url.protocol.length > 2; } catch { return false; } diff --git a/src/timer.ts b/src/timer.ts index c7b9ebcc5..48b13a7e8 100644 --- a/src/timer.ts +++ b/src/timer.ts @@ -8,22 +8,31 @@ export default class Timer { private readonly timeoutId: NodeJS.Timeout; - private constructor( + private constructor(timeoutId: NodeJS.Timeout) { + this.timeoutId = timeoutId; + Timer.TIMERS.add(this); + } + + static setTimeout( runnable: (...args: unknown[]) => void, timeoutMillis: number, - ) { - this.timeoutId = setTimeout(() => { + ): Timer { + const timer = new Timer(setTimeout(() => { runnable(); - Timer.TIMERS.delete(this); - }, timeoutMillis); - Timer.TIMERS.add(this); + Timer.TIMERS.delete(timer); + }, timeoutMillis)); + return timer; } - static setTimeout( + static setInterval( runnable: (...args: unknown[]) => void, timeoutMillis: number, ): Timer { - return new 
Timer(runnable, timeoutMillis); + const timer = new Timer(setInterval(() => { + runnable(); + Timer.TIMERS.delete(timer); + }, timeoutMillis)); + return timer; } /** diff --git a/src/types/cache.ts b/src/types/cache.ts index dccc88f4e..56cf61f6f 100644 --- a/src/types/cache.ts +++ b/src/types/cache.ts @@ -1,22 +1,18 @@ import fs from 'node:fs'; import path from 'node:path'; -import util from 'node:util'; +import * as v8 from 'node:v8'; import * as zlib from 'node:zlib'; -import { Mutex } from 'async-mutex'; +import { E_CANCELED, Mutex } from 'async-mutex'; +import KeyedMutex from '../keyedMutex.js'; import FsPoly from '../polyfill/fsPoly.js'; import Timer from '../timer.js'; -interface CacheData { - data: string, -} - export interface CacheProps { filePath?: string, fileFlushMillis?: number, saveOnExit?: boolean, - maxSize?: number, } /** @@ -25,17 +21,9 @@ export interface CacheProps { export default class Cache { private static readonly BUFFER_ENCODING: BufferEncoding = 'binary'; - private static readonly KEY_MUTEXES_MAX_COUNT = 1000; - - private keyOrder: Set = new Set(); - private keyValues = new Map(); - private readonly keyMutexes = new Map(); - - private keyMutexesLru: Set = new Set(); - - private readonly keyMutexesMutex = new Mutex(); + private readonly keyedMutex = new KeyedMutex(1000); private hasChanged: boolean = false; @@ -45,7 +33,7 @@ export default class Cache { readonly fileFlushMillis?: number; - readonly maxSize?: number; + private readonly saveMutex = new Mutex(); constructor(props?: CacheProps) { this.filePath = props?.filePath; @@ -56,14 +44,13 @@ export default class Cache { await this.save(); }); } - this.maxSize = props?.maxSize; } /** * Return if a key exists in the cache, waiting for any existing operations to complete first. 
*/ public async has(key: string): Promise { - return this.lockKey(key, () => this.keyValues.has(key)); + return this.keyedMutex.runExclusiveForKey(key, () => this.keyValues.has(key)); } /** @@ -84,7 +71,7 @@ export default class Cache { * Get the value of a key in the cache, waiting for any existing operations to complete first. */ public async get(key: string): Promise { - return this.lockKey(key, () => this.keyValues.get(key)); + return this.keyedMutex.runExclusiveForKey(key, () => this.keyValues.get(key)); } /** @@ -96,7 +83,7 @@ export default class Cache { runnable: (key: string) => V | Promise, shouldRecompute?: (value: V) => boolean | Promise, ): Promise { - return this.lockKey(key, async () => { + return this.keyedMutex.runExclusiveForKey(key, async () => { if (this.keyValues.has(key)) { const existingValue = this.keyValues.get(key) as V; if (shouldRecompute === undefined || !await shouldRecompute(existingValue)) { @@ -114,20 +101,14 @@ export default class Cache { * Set the value of a key in the cache. 
*/ public async set(key: string, val: V): Promise { - return this.lockKey(key, () => this.setUnsafe(key, val)); + return this.keyedMutex.runExclusiveForKey(key, () => this.setUnsafe(key, val)); } private setUnsafe(key: string, val: V): void { - if (this.maxSize !== undefined && !this.keyValues.has(key)) { - this.keyOrder.add(key); - } + const oldVal = this.keyValues.get(key); this.keyValues.set(key, val); - this.saveWithTimeout(); - - // Evict old values (FIFO) - if (this.maxSize !== undefined && this.keyValues.size > this.maxSize) { - const staleKey = this.keyOrder.keys().next().value; - this.deleteUnsafe(staleKey); + if (val !== oldVal) { + this.saveWithTimeout(); } } @@ -143,47 +124,16 @@ export default class Cache { } // Note: avoiding lockKey() because it could get expensive with many keys to delete - await this.keyMutexesMutex.runExclusive(() => { + await this.keyedMutex.runExclusiveGlobally(() => { keysToDelete.forEach((k) => this.deleteUnsafe(k)); }); } private deleteUnsafe(key: string): void { - this.keyOrder.delete(key); this.keyValues.delete(key); - this.keyMutexes.delete(key); - this.keyMutexesLru.delete(key); this.saveWithTimeout(); } - private async lockKey(key: string, runnable: () => (R | Promise)): Promise { - // Get a mutex for `key` - const keyMutex = await this.keyMutexesMutex.runExclusive(() => { - if (!this.keyMutexes.has(key)) { - this.keyMutexes.set(key, new Mutex()); - this.keyMutexesLru = new Set([key, ...this.keyMutexesLru]); - - // Expire least recently used keys - [...this.keyMutexesLru] - .filter((lruKey) => !this.keyMutexes.get(lruKey)?.isLocked()) - .slice(Cache.KEY_MUTEXES_MAX_COUNT) - .forEach((lruKey) => { - this.keyMutexes.delete(lruKey); - this.keyMutexesLru.delete(lruKey); - }); - } - - // Mark this key as recently used - this.keyMutexesLru.delete(key); - this.keyMutexesLru = new Set([key, ...this.keyMutexesLru]); - - return this.keyMutexes.get(key) as Mutex; - }); - - // Only allow one concurrent fetch/compute for `key` - 
return keyMutex.runExclusive(async () => runnable()); - } - /** * Load the cache from a file. */ @@ -194,17 +144,15 @@ export default class Cache { } try { - const cacheData = JSON.parse( - await fs.promises.readFile(this.filePath, { encoding: Cache.BUFFER_ENCODING }), - ) as CacheData; - const compressed = Buffer.from(cacheData.data, Cache.BUFFER_ENCODING); - const decompressed = await util.promisify(zlib.inflate)(compressed); - const keyValuesObject = JSON.parse(decompressed.toString(Cache.BUFFER_ENCODING)); + const compressed = await fs.promises.readFile(this.filePath); + if (compressed.length === 0) { + return this; + } + // NOTE(cemmer): util.promisify(zlib.inflate) seems to have issues not throwing correctly + const decompressed = zlib.inflateSync(compressed); + const keyValuesObject = v8.deserialize(decompressed); const keyValuesEntries = Object.entries(keyValuesObject) as [string, V][]; this.keyValues = new Map(keyValuesEntries); - if (this.maxSize !== undefined) { - this.keyOrder = new Set(Object.keys(keyValuesObject)); - } } catch { /* ignored */ } return this; @@ -226,37 +174,58 @@ export default class Cache { * Save the cache to a file. 
*/ public async save(): Promise { - // Clear any existing timeout - if (this.saveToFileTimeout !== undefined) { - this.saveToFileTimeout.cancel(); - this.saveToFileTimeout = undefined; - } + try { + await this.saveMutex.runExclusive(async () => { + // Clear any existing timeout + if (this.saveToFileTimeout !== undefined) { + this.saveToFileTimeout.cancel(); + this.saveToFileTimeout = undefined; + } - if (this.filePath === undefined || !this.hasChanged) { - return; - } + if (this.filePath === undefined || !this.hasChanged) { + return; + } - const keyValuesObject = Object.fromEntries(this.keyValues); - const decompressed = JSON.stringify(keyValuesObject); - const compressed = await util.promisify(zlib.deflate)(decompressed); - const cacheData = { - data: compressed.toString(Cache.BUFFER_ENCODING), - } satisfies CacheData; - - // Ensure the directory exists - const dirPath = path.dirname(this.filePath); - if (!await FsPoly.exists(dirPath)) { - await FsPoly.mkdir(dirPath, { recursive: true }); - } + const keyValuesObject = Object.fromEntries(this.keyValues); + const decompressed = v8.serialize(keyValuesObject); + // NOTE(cemmer): util.promisify(zlib.deflate) seems to have issues not throwing correctly + const compressed = zlib.deflateSync(decompressed); - // Write to a temp file first, then overwrite the old cache file - const tempFile = await FsPoly.mktemp(this.filePath); - await FsPoly.writeFile( - tempFile, - JSON.stringify(cacheData), - { encoding: Cache.BUFFER_ENCODING }, - ); - await FsPoly.mv(tempFile, this.filePath); - this.hasChanged = false; + // Ensure the directory exists + const dirPath = path.dirname(this.filePath); + if (!await FsPoly.exists(dirPath)) { + await FsPoly.mkdir(dirPath, { recursive: true }); + } + + // Write to a temp file first + const tempFile = await FsPoly.mktemp(this.filePath); + await FsPoly.writeFile( + tempFile, + compressed, + { encoding: Cache.BUFFER_ENCODING }, + ); + + // Validate the file was written correctly + const 
tempFileCache = await new Cache({ filePath: tempFile }).load(); + if (tempFileCache.size() !== Object.keys(keyValuesObject).length) { + // The written file is bad, don't use it + await FsPoly.rm(tempFile, { force: true }); + return; + } + + // Overwrite the real file with the temp file + try { + await FsPoly.mv(tempFile, this.filePath); + } catch { + return; + } + this.hasChanged = false; + this.saveMutex.cancel(); // cancel all waiting locks, we just saved + }); + } catch (error) { + if (error !== E_CANCELED) { + throw error; + } + } } } diff --git a/src/types/datStatus.ts b/src/types/datStatus.ts index fcaa3c078..e29ee8679 100644 --- a/src/types/datStatus.ts +++ b/src/types/datStatus.ts @@ -144,9 +144,11 @@ export default class DATStatus { } private static append(map: Map, romType: ROMType, val: T): void { - const arr = map.get(romType) ?? []; - arr.push(val); - map.set(romType, arr); + if (!map.has(romType)) { + map.set(romType, [val]); + } else { + map.get(romType)?.push(val); + } } getDATName(): string { @@ -159,7 +161,7 @@ export default class DATStatus { ...this.incompleteRomTypesToReleaseCandidates.values(), ] .flat() - .filter(ArrayPoly.filterNotNullish) + .filter((releaseCandidate) => releaseCandidate !== undefined) .flatMap((releaseCandidate) => releaseCandidate.getRomsWithFiles()) .map((romWithFiles) => romWithFiles.getInputFile()); } @@ -170,8 +172,8 @@ export default class DATStatus { anyGamesFound(options: Options): boolean { return DATStatus.getAllowedTypes(options) .reduce((result, romType) => { - const foundReleaseCandidates = ( - this.foundRomTypesToReleaseCandidates.get(romType) ?? []).length; + const foundReleaseCandidates = this.foundRomTypesToReleaseCandidates + .get(romType)?.length ?? 
0; return result || foundReleaseCandidates > 0; }, false); } @@ -361,7 +363,7 @@ export default class DATStatus { ): T[] { return DATStatus.getAllowedTypes(options) .flatMap((type) => romTypesToValues.get(type)) - .filter(ArrayPoly.filterNotNullish) + .filter((value) => value !== undefined) .reduce(ArrayPoly.reduceUnique(), []) .sort(); } @@ -377,6 +379,6 @@ export default class DATStatus { options.getOnlyRetail() || (!options.getOnlyBios() && !options.getOnlyDevice()) ? ROMType.RETAIL : undefined, ROMType.PATCHED, - ].filter(ArrayPoly.filterNotNullish); + ].filter((romType) => romType !== undefined); } } diff --git a/src/types/dats/cmpro/cmProParser.ts b/src/types/dats/cmpro/cmProParser.ts index d36de7633..71bc86a85 100644 --- a/src/types/dats/cmpro/cmProParser.ts +++ b/src/types/dats/cmpro/cmProParser.ts @@ -35,26 +35,26 @@ export interface GameProps extends CMProObject { sample?: SampleProps | SampleProps[], // NON-STANDARD PROPERTIES comment?: string, - serial?: string, - publisher?: string, - releaseyear?: string, - releasemonth?: string, - developer?: string, - users?: string, - esrbrating?: string, + // serial?: string, + // publisher?: string, + // releaseyear?: string, + // releasemonth?: string, + // developer?: string, + // users?: string, + // esrbrating?: string, genre?: string, } export interface ROMProps extends CMProObject { name?: string, - merge?: string, + // merge?: string, size?: string, crc?: string, - flags?: string, + // flags?: string, md5?: string, sha1?: string, // NON-STANDARD PROPERTIES - serial?: string, + // serial?: string, } export interface DiskProps extends ROMProps {} diff --git a/src/types/dats/dat.ts b/src/types/dats/dat.ts index 9d1b4aeb7..342902cfa 100644 --- a/src/types/dats/dat.ts +++ b/src/types/dats/dat.ts @@ -107,33 +107,7 @@ export default abstract class DAT { return FsPoly.makeLegal(filename.trim()); } - /** - * Does a DAT explicitly contain headered ROMs. 
It is possible for a DAT to be both non-headered - * and non-headerless. - */ - isHeadered(): boolean { - // No-Intro "headerless" DATs have this field set - if (this.getHeader().getClrMamePro()?.getHeader()) { - return false; - } - - return this.getName().match(/\(headered\)/i) !== null; - } - - /** - * Does a DAT explicitly contain headerless ROMs. It is possible for a DAT to be both non-headered - * and non-headerless. - */ - isHeaderless(): boolean { - // No-Intro "headerless" DATs have this field set - if (this.getHeader().getClrMamePro()?.getHeader()) { - return true; - } - - return this.getName().match(/\(headerless\)/i) !== null; - } - - getRequiredChecksumBitmask(): number { + getRequiredRomChecksumBitmask(): number { let checksumBitmask = 0; this.getGames().forEach((game) => game.getRoms().forEach((rom) => { if (rom.getCrc32() && rom.getSize()) { @@ -149,6 +123,22 @@ export default abstract class DAT { return checksumBitmask; } + getRequiredDiskChecksumBitmask(): number { + let checksumBitmask = 0; + this.getGames().forEach((game) => game.getDisks().forEach((disk) => { + if (disk.getCrc32() && disk.getSize()) { + checksumBitmask |= ChecksumBitmask.CRC32; + } else if (disk.getMd5()) { + checksumBitmask |= ChecksumBitmask.MD5; + } else if (disk.getSha1()) { + checksumBitmask |= ChecksumBitmask.SHA1; + } else if (disk.getSha256()) { + checksumBitmask |= ChecksumBitmask.SHA256; + } + })); + return checksumBitmask; + } + /** * Serialize this {@link DAT} to the file contents of an XML file. 
*/ diff --git a/src/types/dats/disk.ts b/src/types/dats/disk.ts index 55b530ee1..4cd60a95e 100644 --- a/src/types/dats/disk.ts +++ b/src/types/dats/disk.ts @@ -1,11 +1,10 @@ -import { Expose } from 'class-transformer'; +import ROM, { ROMProps } from './rom.js'; -interface DiskOptions { - readonly name?: string; - readonly sha1?: string; - readonly md5?: string; - readonly merge?: string; - readonly status?: 'baddump' | 'nodump' | 'good' | 'verified'; +interface DiskProps extends Omit { + size?: number, + // region?: string, + // index?: number, + // writable?: 'yes' | 'no', } /** @@ -13,27 +12,11 @@ interface DiskOptions { * SHA1 do not both need to be specified in the data file:" * @see http://www.logiqx.com/DatFAQs/CMPro.php */ -export default class Disk implements DiskOptions { - @Expose({ name: 'name' }) - readonly name: string; - - @Expose({ name: 'sha1' }) - readonly sha1: string; - - @Expose({ name: 'md5' }) - readonly md5: string; - - @Expose({ name: 'merge' }) - readonly merge: string; - - @Expose({ name: 'status' }) - readonly status: 'baddump' | 'nodump' | 'good' | 'verified'; - - constructor(options?: DiskOptions) { - this.name = options?.name ?? ''; - this.sha1 = options?.sha1 ?? ''; - this.md5 = options?.md5 ?? ''; - this.merge = options?.merge ?? ''; - this.status = options?.status ?? 'good'; +export default class Disk extends ROM implements DiskProps { + constructor(props?: DiskProps) { + super(props ? { + ...props, + size: props?.size ?? 
0, + } : undefined); } } diff --git a/src/types/dats/game.ts b/src/types/dats/game.ts index b2a65970f..33c6a8ec0 100644 --- a/src/types/dats/game.ts +++ b/src/types/dats/game.ts @@ -4,6 +4,7 @@ import { Expose, Transform, Type } from 'class-transformer'; import ArrayPoly from '../../polyfill/arrayPoly.js'; import Internationalization from '../internationalization.js'; +import Disk from './disk.js'; import Release from './release.js'; import ROM from './rom.js'; @@ -67,7 +68,7 @@ export interface GameProps { // readonly manufacturer?: string, readonly release?: Release | Release[], readonly rom?: ROM | ROM[], - // readonly disk?: Disk | Disk[], + readonly disk?: Disk | Disk[], } /** @@ -124,6 +125,11 @@ export default class Game implements GameProps { @Transform(({ value }) => value || []) readonly rom?: ROM | ROM[]; + @Expose() + @Type(() => Disk) + @Transform(({ value }) => value || []) + readonly disk?: Disk | Disk[]; + constructor(props?: GameProps) { this.name = props?.name ?? ''; this.category = props?.category ?? ''; @@ -134,8 +140,9 @@ export default class Game implements GameProps { this.romOf = props?.romOf; this.sampleOf = props?.sampleOf; this.genre = props?.genre; - this.release = props?.release ?? []; - this.rom = props?.rom ?? 
[]; + this.release = props?.release; + this.rom = props?.rom; + this.disk = props?.disk; } /** @@ -212,6 +219,15 @@ export default class Game implements GameProps { return []; } + getDisks(): Disk[] { + if (Array.isArray(this.disk)) { + return this.disk; + } if (this.disk) { + return [this.disk]; + } + return []; + } + // Computed getters getRevision(): number { @@ -578,7 +594,7 @@ export default class Game implements GameProps { const releaseLanguages = this.getReleases() .map((release) => release.getLanguage()) - .filter(ArrayPoly.filterNotNullish); + .filter((language) => language !== undefined); if (releaseLanguages.length > 0) { return releaseLanguages; } @@ -615,7 +631,7 @@ export default class Game implements GameProps { .map((lang) => lang.toUpperCase()) .map((lang) => Internationalization.LANGUAGE_OPTIONS .find((langOpt) => langOpt.long?.toUpperCase() === lang.toUpperCase())?.short) - .filter(ArrayPoly.filterNotNullish) + .filter((lang) => lang !== undefined) .filter((lang) => Internationalization.LANGUAGES.includes(lang)) // is known .reduce(ArrayPoly.reduceUnique(), []); if (threeMatchesParsed.length > 0) { @@ -637,7 +653,7 @@ export default class Game implements GameProps { } return undefined; }) - .filter(ArrayPoly.filterNotNullish); + .filter((language) => language !== undefined); } // Immutable setters diff --git a/src/types/files/archives/archive.ts b/src/types/files/archives/archive.ts index 8eece28b7..cc1c3f95c 100644 --- a/src/types/files/archives/archive.ts +++ b/src/types/files/archives/archive.ts @@ -34,9 +34,14 @@ export default abstract class Archive { ): Promise { const tempFile = await fsPoly.mktemp(path.join( Temp.getTempDir(), - path.basename(entryPath), + fsPoly.makeLegal(path.basename(entryPath) || path.parse(this.getFilePath()).name), )); + const tempDir = path.dirname(tempFile); + if (!await fsPoly.exists(tempDir)) { + await fsPoly.mkdir(tempDir, { recursive: true }); + } + try { await this.extractEntryToFile(entryPath, tempFile); 
return await callback(tempFile); diff --git a/src/types/files/archives/archiveEntry.ts b/src/types/files/archives/archiveEntry.ts index e3439c59a..845bb72be 100644 --- a/src/types/files/archives/archiveEntry.ts +++ b/src/types/files/archives/archiveEntry.ts @@ -31,12 +31,14 @@ export default class ArchiveEntry extends File implements Arc filePath: archiveEntryProps.archive.getFilePath(), }); this.archive = archiveEntryProps.archive; - this.entryPath = path.normalize(archiveEntryProps.entryPath); + this.entryPath = archiveEntryProps.entryPath + ? path.normalize(archiveEntryProps.entryPath) + : archiveEntryProps.entryPath; } static async entryOf( archiveEntryProps: ArchiveEntryProps, - checksumBitmask: number = ChecksumBitmask.CRC32, + checksumBitmask: number = ChecksumBitmask.NONE, ): Promise> { let finalSize = archiveEntryProps.size; let finalCrcWithHeader = archiveEntryProps.crc32; @@ -151,7 +153,8 @@ export default class ArchiveEntry extends File implements Arc } getExtractedFilePath(): string { - return this.entryPath; + // Note: {@link Chd} will stuff some extra metadata in the entry path, chop it out + return this.entryPath.split('|')[0]; } getEntryPath(): string { @@ -219,6 +222,9 @@ export default class ArchiveEntry extends File implements Arc } withFilePath(filePath: string): ArchiveEntry { + if (this.getArchive().getFilePath() === filePath) { + return this; + } return new ArchiveEntry({ ...this, archive: this.getArchive().withFilePath(filePath), @@ -271,9 +277,9 @@ export default class ArchiveEntry extends File implements Arc toString(): string { if (this.getSymlinkSource()) { - return `${this.getFilePath()}|${this.getEntryPath()} -> ${this.getSymlinkSource()}|${this.getEntryPath()}`; + return `${this.getFilePath()}|${this.getExtractedFilePath()} -> ${this.getSymlinkSource()}|${this.getExtractedFilePath()}`; } - return `${this.getFilePath()}|${this.getEntryPath()}`; + return `${this.getFilePath()}|${this.getExtractedFilePath()}`; } equals(other: File): 
boolean { diff --git a/src/types/files/archives/chd/chd.ts b/src/types/files/archives/chd/chd.ts new file mode 100644 index 000000000..2de8ac00b --- /dev/null +++ b/src/types/files/archives/chd/chd.ts @@ -0,0 +1,208 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import { Readable } from 'node:stream'; +import util from 'node:util'; + +import { Mutex } from 'async-mutex'; +import chdman, { CHDInfo, CHDType } from 'chdman'; +import { Memoize } from 'typescript-memoize'; + +import Temp from '../../../../globals/temp.js'; +import FsPoly from '../../../../polyfill/fsPoly.js'; +import ExpectedError from '../../../expectedError.js'; +import File from '../../file.js'; +import { ChecksumBitmask } from '../../fileChecksums.js'; +import Archive from '../archive.js'; +import ArchiveEntry from '../archiveEntry.js'; +import ChdBinCueParser from './chdBinCueParser.js'; +import ChdGdiParser from './chdGdiParser.js'; + +export default class Chd extends Archive { + private tempSingletonHandles = 0; + + private readonly tempSingletonMutex = new Mutex(); + + private tempSingletonDirPath?: string; + + private tempSingletonFilePath?: string; + + // eslint-disable-next-line class-methods-use-this + protected new(filePath: string): Archive { + return new Chd(filePath); + } + + static getExtensions(): string[] { + return ['.chd']; + } + + // eslint-disable-next-line class-methods-use-this + getExtension(): string { + return Chd.getExtensions()[0]; + } + + async getArchiveEntries(checksumBitmask: number): Promise[]> { + const info = await this.getInfo(); + + if (checksumBitmask === ChecksumBitmask.NONE) { + // Doing a quick scan + return this.getArchiveEntriesSingleFile(info, checksumBitmask); + } + + if (info.type === CHDType.CD_ROM) { + return ChdBinCueParser.getArchiveEntriesBinCue(this, checksumBitmask); + } if (info.type === CHDType.GD_ROM) { + // TODO(cemmer): allow parsing GD-ROM to bin/cue https://github.com/mamedev/mame/issues/11903 + return 
ChdGdiParser.getArchiveEntriesGdRom(this, checksumBitmask); + } + + return this.getArchiveEntriesSingleFile(info, checksumBitmask); + } + + private async getArchiveEntriesSingleFile( + info: CHDInfo, + checksumBitmask: number, + ): Promise[]> { + // MAME DAT s use the data+metadata SHA1 (vs. just the data SHA1) + const rawEntry = await ArchiveEntry.entryOf({ + archive: this, + entryPath: '', + sha1: info.sha1, + // There isn't a way for us to calculate these other checksums, so fill it in with garbage + size: 0, + crc32: checksumBitmask & ChecksumBitmask.CRC32 ? 'x'.repeat(8) : undefined, + md5: checksumBitmask & ChecksumBitmask.MD5 ? 'x'.repeat(32) : undefined, + sha256: checksumBitmask & ChecksumBitmask.SHA256 ? 'x'.repeat(64) : undefined, + }, checksumBitmask); + + const extractedEntry = await ArchiveEntry.entryOf({ + archive: this, + entryPath: '', + size: info.logicalSize, + /** + * NOTE(cemmer): the "data SHA1" equals the original input file in these tested cases: + * - PSP .iso -> .chd with createdvd (and NOT createcd) + */ + sha1: info.dataSha1, + }, checksumBitmask); + + return [rawEntry, extractedEntry]; + } + + async extractEntryToFile( + entryPath: string, + extractedFilePath: string, + ): Promise { + return this.extractEntryToStreamCached( + entryPath, + async (stream) => new Promise((resolve, reject) => { + const writeStream = fs.createWriteStream(extractedFilePath); + writeStream.on('close', resolve); + writeStream.on('error', reject); + stream.pipe(writeStream); + }), + ); + } + + private async extractEntryToStreamCached( + entryPath: string, + callback: (stream: Readable) => (Promise | T), + ): Promise { + await this.tempSingletonMutex.runExclusive(async () => { + this.tempSingletonHandles += 1; + + if (this.tempSingletonDirPath !== undefined) { + return; + } + this.tempSingletonDirPath = await FsPoly.mkdtemp(path.join(Temp.getTempDir(), 'chd')); + this.tempSingletonFilePath = path.join(this.tempSingletonDirPath, 'extracted'); + + const info = 
await this.getInfo(); + if (info.type === CHDType.RAW) { + await chdman.extractRaw({ + inputFilename: this.getFilePath(), + outputFilename: this.tempSingletonFilePath, + }); + } else if (info.type === CHDType.HARD_DISK) { + await chdman.extractHd({ + inputFilename: this.getFilePath(), + outputFilename: this.tempSingletonFilePath, + }); + } else if (info.type === CHDType.CD_ROM) { + const cueFile = `${this.tempSingletonFilePath}.cue`; + this.tempSingletonFilePath += '.bin'; + await chdman.extractCd({ + inputFilename: this.getFilePath(), + outputFilename: cueFile, + outputBinFilename: this.tempSingletonFilePath, + }); + await FsPoly.rm(cueFile, { force: true }); + } else if (info.type === CHDType.GD_ROM) { + this.tempSingletonFilePath = path.join(this.tempSingletonDirPath, 'track.gdi'); + await chdman.extractCd({ + inputFilename: this.getFilePath(), + outputFilename: this.tempSingletonFilePath, + }); + // Apply TOSEC-style CRLF line separators to the .gdi file + await util.promisify(fs.writeFile)( + this.tempSingletonFilePath, + (await util.promisify(fs.readFile)(this.tempSingletonFilePath)).toString() + .replace(/\r?\n/g, '\r\n'), + ); + } else if (info.type === CHDType.DVD_ROM) { + await chdman.extractDvd({ + inputFilename: this.getFilePath(), + outputFilename: this.tempSingletonFilePath, + }); + } else { + throw new ExpectedError(`couldn't detect CHD type for: ${this.getFilePath()}`); + } + + if (!await FsPoly.exists(this.tempSingletonFilePath)) { + throw new ExpectedError(`failed to extract ${this.getFilePath()}|${entryPath} to ${this.tempSingletonFilePath}`); + } + }); + + const [extractedEntryPath, sizeAndOffset] = entryPath.split('|'); + let filePath = this.tempSingletonFilePath as string; + if (extractedEntryPath + && await FsPoly.exists(path.join(this.tempSingletonDirPath as string, extractedEntryPath)) + ) { + // The entry path is the name of a real extracted file, use that + filePath = path.join(this.tempSingletonDirPath as string, extractedEntryPath); + } 
+ + // Parse the entry path for any extra start/stop parameters + const [trackSize, trackOffset] = (sizeAndOffset ?? '').split('@'); + const streamStart = Number.parseInt(trackOffset ?? '0', 10); + const streamEnd = !trackSize || Number.isNaN(Number(trackSize)) + ? undefined + : Number.parseInt(trackOffset ?? '0', 10) + Number.parseInt(trackSize, 10) - 1; + + try { + return await File.createStreamFromFile( + filePath, + callback, + streamStart, + streamEnd, + ); + } catch (error) { + throw new ExpectedError(`failed to read ${this.getFilePath()}|${entryPath} at ${filePath}: ${error}`); + } finally { + // Give a grace period before deleting the temp file, the next read may be of the same file + setTimeout(async () => { + await this.tempSingletonMutex.runExclusive(async () => { + this.tempSingletonHandles -= 1; + if (this.tempSingletonHandles <= 0) { + await FsPoly.rm(this.tempSingletonDirPath as string, { recursive: true, force: true }); + this.tempSingletonDirPath = undefined; + } + }); + }, 5000); + } + } + + @Memoize() + async getInfo(): Promise { + return chdman.info({ inputFilename: this.getFilePath() }); + } +} diff --git a/src/types/files/archives/chd/chdBinCueParser.ts b/src/types/files/archives/chd/chdBinCueParser.ts new file mode 100644 index 000000000..354e7e768 --- /dev/null +++ b/src/types/files/archives/chd/chdBinCueParser.ts @@ -0,0 +1,153 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import util from 'node:util'; + +import { + File as CueFile, parse, Track, TrackDataType, +} from '@gplane/cue'; +import chdman from 'chdman'; + +import Temp from '../../../../globals/temp.js'; +import FsPoly from '../../../../polyfill/fsPoly.js'; +import ExpectedError from '../../../expectedError.js'; +import FileChecksums, { ChecksumBitmask } from '../../fileChecksums.js'; +import Archive from '../archive.js'; +import ArchiveEntry from '../archiveEntry.js'; + +/** + * https://github.com/putnam/binmerge + */ +export default class ChdBinCueParser { + 
public static async getArchiveEntriesBinCue( + archive: T, + checksumBitmask: number, + ): Promise[]> { + const tempFile = await FsPoly.mktemp(path.join( + Temp.getTempDir(), + path.basename(archive.getFilePath()), + )); + + const tempDir = path.dirname(tempFile); + if (!await FsPoly.exists(tempDir)) { + await FsPoly.mkdir(tempDir, { recursive: true }); + } + + const cueFile = `${tempFile}.cue`; + const binFile = `${tempFile}.bin`; + + try { + await chdman.extractCd({ + inputFilename: archive.getFilePath(), + outputFilename: cueFile, + outputBinFilename: binFile, + }); + return await this.parseCue(archive, cueFile, binFile, checksumBitmask); + } finally { + await FsPoly.rm(cueFile, { force: true }); + await FsPoly.rm(binFile, { force: true }); + } + } + + private static async parseCue( + archive: T, + cueFilePath: string, + binFilePath: string, + checksumBitmask: number, + ): Promise[]> { + const cueData = await util.promisify(fs.readFile)(cueFilePath); + const cueSheet = parse(cueData.toString(), { + fatal: true, + }).sheet; + + const binFiles = (await Promise.all(cueSheet.files.flatMap(async (file) => this.parseCueFile( + archive, + file, + binFilePath, + checksumBitmask, + )))).flat(); + + const cueFile = await ArchiveEntry.entryOf({ + archive, + entryPath: `${path.parse(archive.getFilePath()).name}.cue`, + // Junk size and checksums because we don't know what it should be + size: 0, + crc32: checksumBitmask & ChecksumBitmask.CRC32 ? 'x'.repeat(8) : undefined, + md5: checksumBitmask & ChecksumBitmask.MD5 ? 'x'.repeat(32) : undefined, + sha1: checksumBitmask & ChecksumBitmask.SHA1 ? 'x'.repeat(40) : undefined, + sha256: checksumBitmask & ChecksumBitmask.SHA256 ? 
'x'.repeat(64) : undefined, + }); + + return [cueFile, ...binFiles]; + } + + private static async parseCueFile( + archive: T, + file: CueFile, + binFilePath: string, + checksumBitmask: number, + ): Promise[]> { + // Determine the global block size from the first track in the file + const filePath = path.join(path.dirname(binFilePath), file.name); + const fileSize = await FsPoly.size(filePath); + const firstTrack = file.tracks.at(0); + if (!firstTrack) { + return []; + } + const globalBlockSize = ChdBinCueParser.parseCueTrackBlockSize(firstTrack); + let nextItemTimeOffset = Math.floor(fileSize / globalBlockSize); + + const { name: archiveName } = path.parse(archive.getFilePath()); + return (await Promise.all( + file.tracks + .reverse() + .flatMap(async (track) => { + const firstIndex = track.indexes.at(0); + if (!firstIndex) { + return undefined; + } + + const [minutes, seconds, fields] = firstIndex.startingTime; + const startingTimeOffset = fields + (seconds * 75) + (minutes * 60 * 75); + const sectors = nextItemTimeOffset - startingTimeOffset; + nextItemTimeOffset = startingTimeOffset; + const trackOffset = startingTimeOffset * globalBlockSize; + const trackSize = sectors * globalBlockSize; + + const checksums = await FileChecksums.hashFile( + binFilePath, + checksumBitmask, + trackOffset, + trackOffset + trackSize - 1, + ); + + return ArchiveEntry.entryOf({ + archive, + entryPath: `${archiveName} (Track ${track.trackNumber}).bin|${trackSize}@${trackOffset}`, + size: trackSize, + ...checksums, + }, checksumBitmask); + }) + .reverse(), + )) + .filter((entry) => entry !== undefined); + } + + private static parseCueTrackBlockSize(firstTrack: Track): number { + switch (firstTrack.dataType) { + case TrackDataType.Audio: + case TrackDataType['Mode1/2352']: + case TrackDataType['Mode2/2352']: + case TrackDataType['Cdi/2352']: + return 2352; + case TrackDataType.Cdg: + return 2448; + case TrackDataType['Mode1/2048']: + return 2048; + case TrackDataType['Mode2/2336']: + 
case TrackDataType['Cdi/2336']: + return 2336; + default: + throw new ExpectedError(`unknown track type ${TrackDataType[firstTrack.dataType]}`); + } + } +} diff --git a/src/types/files/archives/chd/chdGdiParser.ts b/src/types/files/archives/chd/chdGdiParser.ts new file mode 100644 index 000000000..451d9218c --- /dev/null +++ b/src/types/files/archives/chd/chdGdiParser.ts @@ -0,0 +1,96 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import util from 'node:util'; + +import async, { AsyncResultCallback } from 'async'; +import chdman from 'chdman'; +import fg from 'fast-glob'; + +import Defaults from '../../../../globals/defaults.js'; +import Temp from '../../../../globals/temp.js'; +import FsPoly from '../../../../polyfill/fsPoly.js'; +import ExpectedError from '../../../expectedError.js'; +import FileChecksums from '../../fileChecksums.js'; +import Archive from '../archive.js'; +import ArchiveEntry from '../archiveEntry.js'; + +/** + * https://dreamcast.wiki/GDI_format + */ +export default class ChdGdiParser { + public static async getArchiveEntriesGdRom( + archive: T, + checksumBitmask: number, + ): Promise[]> { + const tempDir = await FsPoly.mkdtemp(path.join(Temp.getTempDir(), 'chd-gdi')); + const gdiFilePath = path.join(tempDir, `${path.parse(archive.getFilePath()).name}.gdi`); + let binRawFilePaths: string[] = []; + + try { + await chdman.extractCd({ + inputFilename: archive.getFilePath(), + outputFilename: gdiFilePath, + }); + binRawFilePaths = await fg(`${fg.convertPathToPattern(tempDir)}/*.{bin,raw}`); + if (binRawFilePaths.length === 0) { + throw new ExpectedError(`failed to find bin/raw files for GD-ROM: ${archive.getFilePath()}`); + } + return await this.parseGdi(archive, gdiFilePath, binRawFilePaths, checksumBitmask); + } finally { + await FsPoly.rm(gdiFilePath, { force: true }); + await Promise.all(binRawFilePaths.map(async (file) => FsPoly.rm(file, { force: true }))); + } + } + + private static async parseGdi( + archive: T, + gdiFilePath: 
string, + binRawFilePaths: string[], + checksumBitmask: number, + ): Promise[]> { + const gdiExtractedContents = await util.promisify(fs.readFile)(gdiFilePath); + + const { name: filePrefix } = path.parse(gdiFilePath); + const gdiContents = `${gdiExtractedContents.toString() + .split(/\r?\n/) + .filter((line) => line) + // Replace the chdman-generated track files with TOSEC-style track filenames + .map((line) => line + .replace(filePrefix, 'track') + .replace(/"/g, '')) + .join('\r\n')}\r\n`; + + const gdiFile = await ArchiveEntry.entryOf({ + archive, + entryPath: path.basename(gdiFilePath), + size: gdiContents.length, + ...await FileChecksums.hashData(gdiContents, checksumBitmask), + }); + + const binRawFiles = await async.mapLimit( + binRawFilePaths, + Defaults.ARCHIVE_ENTRY_SCANNER_THREADS_PER_ARCHIVE, + async (binRawFilePath, callback: AsyncResultCallback, Error>) => { + try { + const binRawFile = await ArchiveEntry.entryOf({ + archive, + entryPath: path.basename(binRawFilePath).replace(filePrefix, 'track'), + size: await FsPoly.size(binRawFilePath), + ...await FileChecksums.hashFile(binRawFilePath, checksumBitmask), + }); + callback(undefined, binRawFile); + } catch (error) { + if (error instanceof Error) { + callback(error); + } else if (typeof error === 'string') { + callback(new Error(error)); + } else { + callback(new Error(`unknown error when parsing GD-ROM bin/raw file: ${binRawFilePath}`)); + } + } + }, + ); + + return [gdiFile, ...binRawFiles]; + } +} diff --git a/src/types/files/archives/maxcso/cso.ts b/src/types/files/archives/maxcso/cso.ts new file mode 100644 index 000000000..8e649c7fc --- /dev/null +++ b/src/types/files/archives/maxcso/cso.ts @@ -0,0 +1,18 @@ +import Archive from '../archive.js'; +import Maxcso from './maxcso.js'; + +export default class Cso extends Maxcso { + // eslint-disable-next-line class-methods-use-this + protected new(filePath: string): Archive { + return new Cso(filePath); + } + + static getExtensions(): string[] { + 
return ['.cso']; + } + + // eslint-disable-next-line class-methods-use-this + getExtension(): string { + return Cso.getExtensions()[0]; + } +} diff --git a/src/types/files/archives/maxcso/dax.ts b/src/types/files/archives/maxcso/dax.ts new file mode 100644 index 000000000..73107cffc --- /dev/null +++ b/src/types/files/archives/maxcso/dax.ts @@ -0,0 +1,18 @@ +import Archive from '../archive.js'; +import Maxcso from './maxcso.js'; + +export default class Dax extends Maxcso { + // eslint-disable-next-line class-methods-use-this + protected new(filePath: string): Archive { + return new Dax(filePath); + } + + static getExtensions(): string[] { + return ['.dax']; + } + + // eslint-disable-next-line class-methods-use-this + getExtension(): string { + return Dax.getExtensions()[0]; + } +} diff --git a/src/types/files/archives/maxcso/maxcso.ts b/src/types/files/archives/maxcso/maxcso.ts new file mode 100644 index 000000000..c96c14461 --- /dev/null +++ b/src/types/files/archives/maxcso/maxcso.ts @@ -0,0 +1,32 @@ +import path from 'node:path'; + +import maxcso from 'maxcso'; + +import { ChecksumBitmask } from '../../fileChecksums.js'; +import Archive from '../archive.js'; +import ArchiveEntry from '../archiveEntry.js'; + +export default abstract class Maxcso extends Archive { + async getArchiveEntries(checksumBitmask: number): Promise[]> { + const entryPath = `${path.parse(this.getFilePath()).name}.iso`; + const size = (await maxcso.header(this.getFilePath())).uncompressedSize; + let crc32: string | undefined; + if (checksumBitmask === ChecksumBitmask.NONE || checksumBitmask & ChecksumBitmask.CRC32) { + crc32 = await maxcso.uncompressedCrc32(this.getFilePath()); + } + + return [await ArchiveEntry.entryOf({ + archive: this, + entryPath, + size: Number(size), + crc32, + }, checksumBitmask)]; + } + + async extractEntryToFile(entryPath: string, extractedFilePath: string): Promise { + return maxcso.decompress({ + inputFilename: this.getFilePath(), + outputFilename: 
extractedFilePath, + }); + } +} diff --git a/src/types/files/archives/maxcso/zso.ts b/src/types/files/archives/maxcso/zso.ts new file mode 100644 index 000000000..3dd463737 --- /dev/null +++ b/src/types/files/archives/maxcso/zso.ts @@ -0,0 +1,18 @@ +import Archive from '../archive.js'; +import Maxcso from './maxcso.js'; + +export default class Zso extends Maxcso { + // eslint-disable-next-line class-methods-use-this + protected new(filePath: string): Archive { + return new Zso(filePath); + } + + static getExtensions(): string[] { + return ['.zso']; + } + + // eslint-disable-next-line class-methods-use-this + getExtension(): string { + return Zso.getExtensions()[0]; + } +} diff --git a/src/types/files/archives/nkitIso.ts b/src/types/files/archives/nkitIso.ts new file mode 100644 index 000000000..94fe54dac --- /dev/null +++ b/src/types/files/archives/nkitIso.ts @@ -0,0 +1,46 @@ +import path from 'node:path'; + +import FilePoly from '../../../polyfill/filePoly.js'; +import ExpectedError from '../../expectedError.js'; +import Archive from './archive.js'; +import ArchiveEntry from './archiveEntry.js'; + +// @see https://wiki.gbatemp.net/wiki/NKit/NKitFormat +export default class NkitIso extends Archive { + // eslint-disable-next-line class-methods-use-this + protected new(filePath: string): Archive { + return new NkitIso(filePath); + } + + static getExtensions(): string[] { + return ['.nkit.iso']; + } + + // eslint-disable-next-line class-methods-use-this + getExtension(): string { + return NkitIso.getExtensions()[0]; + } + + // eslint-disable-next-line class-methods-use-this,@typescript-eslint/require-await + async extractEntryToFile(): Promise { + throw new ExpectedError('extraction isn\'t supported for NKit ISO files'); + } + + async getArchiveEntries(): Promise[]> { + const file = await FilePoly.fileFrom(this.getFilePath(), 'r'); + try { + const crc32 = (await file.readAt(0x2_08, 0x4)).toString('hex'); + const size = (await file.readAt(0x2_10, 0x4)).readUInt32BE(); 
+ + const archiveEntry = await ArchiveEntry.entryOf({ + archive: this, + entryPath: path.basename(this.getFilePath()).replace(/\.nkit/i, ''), + size, + crc32, + }); + return [archiveEntry]; + } finally { + await file.close(); + } + } +} diff --git a/src/types/files/archives/gzip.ts b/src/types/files/archives/sevenZip/gzip.ts similarity index 86% rename from src/types/files/archives/gzip.ts rename to src/types/files/archives/sevenZip/gzip.ts index eb6aa9caa..df7469aa8 100644 --- a/src/types/files/archives/gzip.ts +++ b/src/types/files/archives/sevenZip/gzip.ts @@ -1,7 +1,7 @@ -import Archive from './archive.js'; -import ArchiveEntry from './archiveEntry.js'; +import Archive from '../archive.js'; +import ArchiveEntry from '../archiveEntry.js'; +import Tar from '../tar.js'; import SevenZip from './sevenZip.js'; -import Tar from './tar.js'; export default class Gzip extends SevenZip { // eslint-disable-next-line class-methods-use-this diff --git a/src/types/files/archives/sevenZip.ts b/src/types/files/archives/sevenZip/sevenZip.ts similarity index 85% rename from src/types/files/archives/sevenZip.ts rename to src/types/files/archives/sevenZip/sevenZip.ts index 55fea97c4..0b003d3c3 100644 --- a/src/types/files/archives/sevenZip.ts +++ b/src/types/files/archives/sevenZip/sevenZip.ts @@ -4,12 +4,12 @@ import _7z, { Result } from '7zip-min'; import async, { AsyncResultCallback } from 'async'; import { Mutex } from 'async-mutex'; -import Defaults from '../../../globals/defaults.js'; -import Temp from '../../../globals/temp.js'; -import fsPoly from '../../../polyfill/fsPoly.js'; -import ExpectedError from '../../expectedError.js'; -import Archive from './archive.js'; -import ArchiveEntry from './archiveEntry.js'; +import Defaults from '../../../../globals/defaults.js'; +import Temp from '../../../../globals/temp.js'; +import fsPoly from '../../../../polyfill/fsPoly.js'; +import ExpectedError from '../../../expectedError.js'; +import Archive from '../archive.js'; +import 
ArchiveEntry from '../archiveEntry.js'; export default class SevenZip extends Archive { private static readonly LIST_MUTEX = new Mutex(); @@ -31,17 +31,23 @@ export default class SevenZip extends Archive { async getArchiveEntries(checksumBitmask: number): Promise[]> { /** * WARN(cemmer): even with the above mutex, {@link _7z.list} will still sometimes return no - * entries. Most archives contain at least one file, so assume this is wrong and attempt - * again up to 3 times total. + * entries. This seems to happen more on older Node.js versions (v16, v18) and specific OSes + * (Linux). Most archives contain at least one file, so assume this is wrong and attempt again + * up to 5 times total. */ - for (let attempt = 1; attempt <= 3; attempt += 1) { + const maxAttempts = 5; + for (let attempt = 1; attempt <= maxAttempts; attempt += 1) { const archiveEntries = await this.getArchiveEntriesNotCached(checksumBitmask); if (archiveEntries.length > 0) { return archiveEntries; } + // Backoff with jitter + if (attempt >= maxAttempts) { + break; + } await new Promise((resolve) => { - setTimeout(resolve, Math.random() * (2 ** (attempt - 1) * 100)); + setTimeout(resolve, Math.random() * (2 ** (attempt - 1) * 10)); }); } diff --git a/src/types/files/archives/z.ts b/src/types/files/archives/sevenZip/z.ts similarity index 100% rename from src/types/files/archives/z.ts rename to src/types/files/archives/sevenZip/z.ts diff --git a/src/types/files/archives/zipSpanned.ts b/src/types/files/archives/sevenZip/zipSpanned.ts similarity index 100% rename from src/types/files/archives/zipSpanned.ts rename to src/types/files/archives/sevenZip/zipSpanned.ts diff --git a/src/types/files/archives/zipX.ts b/src/types/files/archives/sevenZip/zipX.ts similarity index 100% rename from src/types/files/archives/zipX.ts rename to src/types/files/archives/sevenZip/zipX.ts diff --git a/src/types/files/archives/zip.ts b/src/types/files/archives/zip.ts index cb005dc05..9780e4620 100644 --- 
a/src/types/files/archives/zip.ts +++ b/src/types/files/archives/zip.ts @@ -1,7 +1,6 @@ import fs from 'node:fs'; import path from 'node:path'; import { Readable } from 'node:stream'; -import { clearInterval } from 'node:timers'; import archiver, { Archiver } from 'archiver'; import async, { AsyncResultCallback } from 'async'; @@ -10,6 +9,7 @@ import unzipper, { Entry } from 'unzipper'; import Defaults from '../../../globals/defaults.js'; import fsPoly from '../../../polyfill/fsPoly.js'; import StreamPoly from '../../../polyfill/streamPoly.js'; +import Timer from '../../../timer.js'; import ExpectedError from '../../expectedError.js'; import File from '../file.js'; import FileChecksums, { ChecksumBitmask, ChecksumProps } from '../fileChecksums.js'; @@ -78,9 +78,9 @@ export default class Zip extends Archive { entryPath: string, extractedFilePath: string, ): Promise { - const localDir = path.dirname(extractedFilePath); - if (!await fsPoly.exists(localDir)) { - await fsPoly.mkdir(localDir, { recursive: true }); + const extractedDir = path.dirname(extractedFilePath); + if (!await fsPoly.exists(extractedDir)) { + await fsPoly.mkdir(extractedDir, { recursive: true }); } return this.extractEntryToStream( @@ -215,9 +215,9 @@ export default class Zip extends Archive { // Leave the input stream open until we're done writing it await new Promise((resolve) => { - const interval = setInterval(() => { + const timer = Timer.setInterval(() => { if (writtenEntries.has(entryName) || zipFileError) { - clearInterval(interval); + timer.cancel(); resolve(); } }, 10); diff --git a/src/types/files/file.ts b/src/types/files/file.ts index 1c5148786..bf02b967c 100644 --- a/src/types/files/file.ts +++ b/src/types/files/file.ts @@ -408,6 +408,19 @@ export default class File implements FileProps { https.get(this.getFilePath(), { timeout: 30_000, }, (res) => { + if (res.statusCode !== undefined + && res.statusCode >= 300 && res.statusCode < 400 + && res.headers.location + ) { + // Handle 
redirects + File.fileOf({ filePath: res.headers.location }) + .then(async (file) => file.downloadToPath(filePath)) + .then(resolve) + .catch(reject); + res.destroy(); + return; + } + const writeStream = fs.createWriteStream(filePath); res.pipe(writeStream); writeStream.on('finish', async () => { diff --git a/src/types/files/fileCache.ts b/src/types/files/fileCache.ts index b0021d764..381c0448f 100644 --- a/src/types/files/fileCache.ts +++ b/src/types/files/fileCache.ts @@ -12,10 +12,17 @@ import ROMHeader from './romHeader.js'; interface CacheValue { fileSize: number, modifiedTimeMillis: number, - value: FileProps | ArchiveEntryProps[] | string | undefined, + value: number + // getOrComputeFileChecksums() + | FileProps + // getOrComputeArchiveChecksums() + | ArchiveEntryProps[] + // getOrComputeFileHeader(), getOrComputeFileSignature() + | string | undefined, } const ValueType = { + INODE: 'I', FILE_CHECKSUMS: 'F', ARCHIVE_CHECKSUMS: 'A', // ROM headers and file signatures may not be found for files, and that is a valid result that @@ -26,53 +33,54 @@ const ValueType = { }; export default class FileCache { - private static readonly VERSION = 3; + private static readonly VERSION = 4; - private static cache: Cache = new Cache(); + private cache: Cache = new Cache(); - private static enabled = true; + private enabled = true; - public static disable(): void { + disable(): void { this.enabled = false; } - public static async loadFile(filePath: string): Promise { + async loadFile(cacheFilePath: string): Promise { this.cache = await new Cache({ - filePath, - fileFlushMillis: 30_000, + filePath: cacheFilePath, + fileFlushMillis: 60_000, saveOnExit: true, }).load(); // Cleanup the loaded cache file // Delete keys from old cache versions - await Promise.all([...Array.from({ length: FileCache.VERSION }).keys()].slice(1) - .map(async (prevVersion) => { - const keyRegex = new RegExp(`^V${prevVersion}\\|`); - return this.cache.delete(keyRegex); - })); + await 
this.cache.delete(new RegExp(`^V(${[...Array.from({ length: FileCache.VERSION }).keys()].slice(1).join('|')})\\|`)); // Delete keys from old value types await this.cache.delete(new RegExp(`\\|(?!(${Object.values(ValueType).join('|')}))[^|]+$`)); // Delete keys for deleted files - const disks = FsPoly.disksSync(); Timer.setTimeout(async () => { - await Promise.all([...this.cache.keys()] - .map((cacheKey) => cacheKey.split('|')[1]) + const cacheKeyFilePaths = [...this.cache.keys()] + .filter((cacheKey) => cacheKey.endsWith(`|${ValueType.INODE}`)) + .map((cacheKey) => ([cacheKey, cacheKey.split('|')[1]])) // Don't delete the key if it's for a disk that isn't mounted right now - .filter((cacheKeyFilePath) => disks.some((disk) => cacheKeyFilePath.startsWith(disk))) + .filter(([, filePath]) => FsPoly.diskResolved(filePath)) // Only process a reasonably sized subset of the keys .sort(() => Math.random() - 0.5) - .slice(0, Defaults.MAX_FS_THREADS) - .map(async (cacheKeyFilePath) => { - if (!await FsPoly.exists(cacheKeyFilePath)) { - // If the file no longer exists, then delete its key from the cache - await this.cache.delete(cacheKeyFilePath); - } - })); + .slice(0, Defaults.MAX_FS_THREADS); + + await Promise.all(cacheKeyFilePaths.map(async ([cacheKey, filePath]) => { + if (!await FsPoly.exists(filePath)) { + // Delete the related cache keys + const inode = (await this.cache.get(cacheKey))?.value as number; + await this.cache.delete(new RegExp(`^V${FileCache.VERSION}\\|${inode}\\|`)); + + // Delete the inode key from the cache + await this.cache.delete(cacheKey); + } + })); }, 5000); } - public static async save(): Promise { + async save(): Promise { if (!this.enabled) { return; } @@ -80,17 +88,13 @@ export default class FileCache { await this.cache.save(); } - static async getOrComputeFileChecksums( + async getOrComputeFileChecksums( filePath: string, checksumBitmask: number, ): Promise { - if (!this.enabled || checksumBitmask === ChecksumBitmask.NONE) { - return 
File.fileOf({ filePath }, checksumBitmask); - } - // NOTE(cemmer): we're explicitly not catching ENOENT errors here, we want it to bubble up const stats = await FsPoly.stat(filePath); - const cacheKey = this.getCacheKey(filePath, ValueType.FILE_CHECKSUMS); + const cacheKey = await this.getCacheKey(filePath, undefined, ValueType.FILE_CHECKSUMS); // NOTE(cemmer): we're using the cache as a mutex here, so even if this function is called // multiple times concurrently, entries will only be fetched once. @@ -112,13 +116,13 @@ export default class FileCache { } const cachedFile = cached.value as FileProps; - const existingBitmask = ((cachedFile.crc32 !== undefined && cachedFile.crc32 !== '00000000') ? ChecksumBitmask.CRC32 : 0) + const existingBitmask = ((cachedFile.crc32) ? ChecksumBitmask.CRC32 : 0) | (cachedFile.md5 ? ChecksumBitmask.MD5 : 0) | (cachedFile.sha1 ? ChecksumBitmask.SHA1 : 0) | (cachedFile.sha256 ? ChecksumBitmask.SHA256 : 0); - const remainingBitmask = checksumBitmask ^ existingBitmask; + const remainingBitmask = checksumBitmask - (checksumBitmask & existingBitmask); // We need checksums that haven't been cached yet - return remainingBitmask !== 0; + return remainingBitmask > 0; }, ); @@ -130,27 +134,24 @@ export default class FileCache { // We didn't compute the file (cache hit), deserialize the properties into a full object const cachedFile = cachedValue.value as FileProps; - return File.fileOfObject(filePath, { - ...cachedFile, - // Only return the checksums requested - crc32: checksumBitmask & ChecksumBitmask.CRC32 ? cachedFile.crc32 : undefined, - md5: checksumBitmask & ChecksumBitmask.MD5 ? cachedFile.md5 : undefined, - sha1: checksumBitmask & ChecksumBitmask.SHA1 ? cachedFile.sha1 : undefined, - sha256: checksumBitmask & ChecksumBitmask.SHA256 ? 
cachedFile.sha256 : undefined, - }); + return File.fileOfObject(filePath, cachedFile); } - static async getOrComputeArchiveChecksums( + async getOrComputeArchiveChecksums( archive: T, checksumBitmask: number, ): Promise[]> { - if (!this.enabled || checksumBitmask === ChecksumBitmask.NONE) { - return archive.getArchiveEntries(checksumBitmask); - } - // NOTE(cemmer): we're explicitly not catching ENOENT errors here, we want it to bubble up const stats = await FsPoly.stat(archive.getFilePath()); - const cacheKey = this.getCacheKey(archive.getFilePath(), ValueType.ARCHIVE_CHECKSUMS); + if (stats.size === 0) { + // An empty file can't have entries + return []; + } + const cacheKey = await this.getCacheKey( + archive.getFilePath(), + undefined, + ValueType.ARCHIVE_CHECKSUMS, + ); // NOTE(cemmer): we're using the cache as a mutex here, so even if this function is called // multiple times concurrently, entries will only be fetched once. @@ -172,13 +173,14 @@ export default class FileCache { } const cachedEntries = cached.value as ArchiveEntryProps[]; - const existingBitmask = (cachedEntries.every((props) => props.crc32 !== undefined && props.crc32 !== '00000000') ? ChecksumBitmask.CRC32 : 0) + const existingBitmask = (cachedEntries + .every((props) => props.crc32) ? ChecksumBitmask.CRC32 : 0) | (cachedEntries.every((props) => props.md5) ? ChecksumBitmask.MD5 : 0) | (cachedEntries.every((props) => props.sha1) ? ChecksumBitmask.SHA1 : 0) | (cachedEntries.every((props) => props.sha256) ? 
ChecksumBitmask.SHA256 : 0); - const remainingBitmask = checksumBitmask ^ existingBitmask; + const remainingBitmask = checksumBitmask - (checksumBitmask & existingBitmask); // We need checksums that haven't been cached yet - return remainingBitmask !== 0; + return remainingBitmask > 0; }, ); @@ -191,20 +193,22 @@ export default class FileCache { // We didn't compute the archive entries (cache hit), deserialize the properties into // full objects const cachedEntries = cachedValue.value as ArchiveEntryProps[]; - return Promise.all(cachedEntries.map(async (props) => ArchiveEntry.entryOfObject(archive, { - ...props, - // Only return the checksums requested - crc32: checksumBitmask & ChecksumBitmask.CRC32 ? props.crc32 : undefined, - md5: checksumBitmask & ChecksumBitmask.MD5 ? props.md5 : undefined, - sha1: checksumBitmask & ChecksumBitmask.SHA1 ? props.sha1 : undefined, - sha256: checksumBitmask & ChecksumBitmask.SHA256 ? props.sha256 : undefined, - }))); + return Promise.all(cachedEntries + .map(async (props) => ArchiveEntry.entryOfObject(archive, props))); } - static async getOrComputeFileHeader(file: File): Promise { + async getOrComputeFileHeader(file: File): Promise { // NOTE(cemmer): we're explicitly not catching ENOENT errors here, we want it to bubble up const stats = await FsPoly.stat(file.getFilePath()); - const cacheKey = this.getCacheKey(file.toString(), ValueType.ROM_HEADER); + if (stats.size === 0) { + // An empty file can't have a header + return undefined; + } + const cacheKey = await this.getCacheKey( + file.getFilePath(), + file instanceof ArchiveEntry ? 
file.getEntryPath() : undefined, + ValueType.ROM_HEADER, + ); const cachedValue = await this.cache.getOrCompute( cacheKey, @@ -235,10 +239,18 @@ export default class FileCache { return ROMHeader.headerFromName(cachedHeaderName); } - static async getOrComputeFileSignature(file: File): Promise { + async getOrComputeFileSignature(file: File): Promise { // NOTE(cemmer): we're explicitly not catching ENOENT errors here, we want it to bubble up const stats = await FsPoly.stat(file.getFilePath()); - const cacheKey = this.getCacheKey(file.toString(), ValueType.FILE_SIGNATURE); + if (stats.size === 0) { + // An empty file can't have a signature + return undefined; + } + const cacheKey = await this.getCacheKey( + file.getFilePath(), + file instanceof ArchiveEntry ? file.getEntryPath() : undefined, + ValueType.FILE_SIGNATURE, + ); const cachedValue = await this.cache.getOrCompute( cacheKey, @@ -269,7 +281,19 @@ export default class FileCache { return FileSignature.signatureFromName(cachedSignatureName); } - private static getCacheKey(filePath: string, valueType: string): string { - return `V${FileCache.VERSION}|${filePath}|${valueType}`; + private async getCacheKey( + filePath: string, + entryPath: string | undefined, + valueType: string, + ): Promise { + const stats = await FsPoly.stat(filePath); + const inodeKey = `V${FileCache.VERSION}|${filePath}|${ValueType.INODE}`; + await this.cache.set(inodeKey, { + fileSize: stats.size, + modifiedTimeMillis: stats.mtimeMs, + value: stats.ino, + }); + + return `V${FileCache.VERSION}|${stats.ino}|${entryPath ?? 
''}|${valueType}`; } } diff --git a/src/types/files/fileFactory.ts b/src/types/files/fileFactory.ts index 450dba971..f15724e60 100644 --- a/src/types/files/fileFactory.ts +++ b/src/types/files/fileFactory.ts @@ -1,39 +1,57 @@ +import URLPoly from '../../polyfill/urlPoly.js'; import ExpectedError from '../expectedError.js'; import Archive from './archives/archive.js'; import ArchiveEntry from './archives/archiveEntry.js'; import ArchiveFile from './archives/archiveFile.js'; -import Gzip from './archives/gzip.js'; +import Chd from './archives/chd/chd.js'; +import Cso from './archives/maxcso/cso.js'; +import Dax from './archives/maxcso/dax.js'; +import Zso from './archives/maxcso/zso.js'; +import NkitIso from './archives/nkitIso.js'; import Rar from './archives/rar.js'; -import SevenZip from './archives/sevenZip.js'; +import Gzip from './archives/sevenZip/gzip.js'; +import SevenZip from './archives/sevenZip/sevenZip.js'; +import Z from './archives/sevenZip/z.js'; +import ZipSpanned from './archives/sevenZip/zipSpanned.js'; +import ZipX from './archives/sevenZip/zipX.js'; import Tar from './archives/tar.js'; -import Z from './archives/z.js'; import Zip from './archives/zip.js'; -import ZipSpanned from './archives/zipSpanned.js'; -import ZipX from './archives/zipX.js'; import File from './file.js'; import FileCache from './fileCache.js'; import { ChecksumBitmask } from './fileChecksums.js'; import FileSignature from './fileSignature.js'; +import ROMHeader from './romHeader.js'; export default class FileFactory { - static async filesFrom( + private readonly fileCache: FileCache; + + constructor(fileCache: FileCache) { + this.fileCache = fileCache; + } + + async filesFrom( filePath: string, - checksumBitmask: number = ChecksumBitmask.CRC32, + fileChecksumBitmask: number = ChecksumBitmask.CRC32, + archiveChecksumBitmask = fileChecksumBitmask, ): Promise { - if (!this.isExtensionArchive(filePath)) { - const entries = await this.entriesFromArchiveSignature(filePath, 
checksumBitmask); + if (URLPoly.canParse(filePath)) { + return [await File.fileOf({ filePath })]; + } + + if (!FileFactory.isExtensionArchive(filePath)) { + const entries = await this.entriesFromArchiveSignature(filePath, archiveChecksumBitmask); if (entries !== undefined) { return entries; } - return [await this.fileFrom(filePath, checksumBitmask)]; + return [await this.fileFrom(filePath, fileChecksumBitmask)]; } try { - const entries = await this.entriesFromArchiveExtension(filePath, checksumBitmask); + const entries = await this.entriesFromArchiveExtension(filePath, archiveChecksumBitmask); if (entries !== undefined) { return entries; } - return [await this.fileFrom(filePath, checksumBitmask)]; + return [await this.fileFrom(filePath, fileChecksumBitmask)]; } catch (error) { if (error && typeof error === 'object' && 'code' in error && error.code === 'ENOENT') { throw new ExpectedError(`file doesn't exist: ${filePath}`); @@ -45,14 +63,14 @@ export default class FileFactory { } } - public static async fileFrom( + async fileFrom( filePath: string, checksumBitmask: number, ): Promise { - return FileCache.getOrComputeFileChecksums(filePath, checksumBitmask); + return this.fileCache.getOrComputeFileChecksums(filePath, checksumBitmask); } - public static async archiveFileFrom( + async archiveFileFrom( archive: Archive, checksumBitmask: number, ): Promise { @@ -68,7 +86,7 @@ export default class FileFactory { * * This ordering should match {@link ROMScanner#archiveEntryPriority} */ - private static async entriesFromArchiveExtension( + private async entriesFromArchiveExtension( filePath: string, checksumBitmask: number, fileExt = filePath.replace(/.+?(?=(\.[a-zA-Z0-9]+)+)/, ''), @@ -90,11 +108,21 @@ export default class FileFactory { archive = new ZipSpanned(filePath); } else if (ZipX.getExtensions().some((ext) => fileExt.toLowerCase().endsWith(ext))) { archive = new ZipX(filePath); + } else if (Chd.getExtensions().some((ext) => fileExt.toLowerCase().endsWith(ext))) { + 
archive = new Chd(filePath); + } else if (Cso.getExtensions().some((ext) => fileExt.toLowerCase().endsWith(ext))) { + archive = new Cso(filePath); + } else if (Dax.getExtensions().some((ext) => fileExt.toLowerCase().endsWith(ext))) { + archive = new Dax(filePath); + } else if (Zso.getExtensions().some((ext) => fileExt.toLowerCase().endsWith(ext))) { + archive = new Zso(filePath); + } else if (NkitIso.getExtensions().some((ext) => fileExt.toLowerCase().endsWith(ext))) { + archive = new NkitIso(filePath); } else { return undefined; } - return FileCache.getOrComputeArchiveChecksums(archive, checksumBitmask); + return this.fileCache.getOrComputeArchiveChecksums(archive, checksumBitmask); } /** @@ -103,18 +131,19 @@ export default class FileFactory { * * This ordering should match {@link ROMScanner#archiveEntryPriority} */ - private static async entriesFromArchiveSignature( + private async entriesFromArchiveSignature( filePath: string, checksumBitmask: number, ): Promise[] | undefined> { let signature: FileSignature | undefined; try { const file = await File.fileOf({ filePath }); - signature = await FileCache.getOrComputeFileSignature(file); + signature = await this.fileCache.getOrComputeFileSignature(file); } catch { // Fail silently on assumed I/O errors return undefined; } + if (!signature) { return undefined; } @@ -137,6 +166,20 @@ export default class FileFactory { ...Z.getExtensions(), ...ZipSpanned.getExtensions(), ...ZipX.getExtensions(), + // Compressed images + ...Cso.getExtensions(), + ...Dax.getExtensions(), + ...Zso.getExtensions(), + ...Chd.getExtensions(), + ...NkitIso.getExtensions(), ].some((ext) => filePath.toLowerCase().endsWith(ext)); } + + async headerFrom(file: File): Promise { + return this.fileCache.getOrComputeFileHeader(file); + } + + async signatureFrom(file: File): Promise { + return this.fileCache.getOrComputeFileSignature(file); + } } diff --git a/src/types/files/fileSignature.ts b/src/types/files/fileSignature.ts index 116a3aab3..ed129b378 
100644 --- a/src/types/files/fileSignature.ts +++ b/src/types/files/fileSignature.ts @@ -13,6 +13,11 @@ export default class FileSignature { // @see https://file-extension.net/seeker/ // @see https://gbatemp.net/threads/help-with-rom-iso-console-identification.611378/ private static readonly SIGNATURES: { [key: string]: FileSignature } = { + // ********** GENERAL ********** + + // @see https://en.wikipedia.org/wiki/List_of_file_signatures + elf: new FileSignature('.elf', [{ value: Buffer.from('\x7FELF') }]), + // ********** ARCHIVES ********** // @see https://en.wikipedia.org/wiki/List_of_file_signatures @@ -76,9 +81,13 @@ export default class FileSignature { // ********** ROMs - GENERAL ********** + chd: new FileSignature('.chd', [{ value: Buffer.from('MComprHD') }]), + // @see https://docs.fileformat.com/disc-and-media/cso/ cso: new FileSignature('.cso', [{ value: Buffer.from('CISO') }]), + dax: new FileSignature('.dax', [{ value: Buffer.from('DAX') }]), + // @see https://en.wikipedia.org/wiki/List_of_file_signatures isz: new FileSignature('.isz', [{ value: Buffer.from('IsZ!') }]), @@ -112,6 +121,15 @@ export default class FileSignature { // Nintendo - Game & Watch gw: new FileSignature('.bin', [{ value: Buffer.from('main.bs') }]), + // Nintendo - GameCube + // @see https://github.com/dolphin-emu/dolphin/blob/1f5e100a0e6dd4f9ab3784fd6373d452054d08bf/Source/Core/DiscIO/CompressedBlob.h#L25 (reversed) + gcz: new FileSignature('.gcz', [{ value: Buffer.from('01C00BB1', 'hex') }]), + // @see https://wiki.gbatemp.net/wiki/NKit/NKitFormat + nkit_iso: new FileSignature('.nkit.iso', [{ offset: 0x2_00, value: Buffer.from('NKIT') }]), + // @see https://github.com/dolphin-emu/dolphin/blob/master/docs/WiaAndRvz.md + rvz: new FileSignature('.rvz', [{ value: Buffer.from('RVZ\x01') }]), // "RVZ\x01" + wia: new FileSignature('.wia', [{ value: Buffer.from('WIA\x01') }]), // "WIA\x01" + // Nintendo - Game Boy // @see https://gbdev.io/pandocs/The_Cartridge_Header.html gb: new 
FileSignature('.gb', [ @@ -154,6 +172,14 @@ export default class FileSignature { smc_gd3_1: new FileSignature('.smc', [{ value: Buffer.from('\x00\x01ME DOCTOR SF 3') }]), // Game Doctor SF3? smc_gd3_2: new FileSignature('.smc', [{ value: Buffer.from('GAME DOCTOR SF 3') }]), // Game Doctor SF3/SF6/SF7 + // Nintendo - Wii + // @see https://wit.wiimm.de/info/wdf.html + wdf: new FileSignature('.wdf', [{ value: Buffer.from('WII\x01DISC') }]), + + // Nintendo - Wii U + // @see https://github.com/cemu-project/Cemu/blob/7522c8470ee27d50a68ba662ae721b69018f3a8f/src/Cafe/Filesystem/WUD/wud.h#L25 + wux: new FileSignature('.wux', [{ value: Buffer.from('WUX0\x2E\xD0\x99\x10') }]), + // Sega - 32X // @see https://github.com/jcfieldsdev/genesis-rom-utility/blob/31826bca66c8c6c467c37c1b711943eb5464e7e8/genesis_rom.chm // @see https://plutiedev.com/rom-header @@ -191,6 +217,7 @@ export default class FileSignature { pico: new FileSignature('.md', [{ offset: 0x1_00, value: Buffer.from('SEGA PICO') }]), // Sony - PlayStation Portable + // @see https://www.psdevwiki.com/ps3/Eboot.PBP pbp: new FileSignature('.pbp', [{ value: Buffer.from('\x00PBP\x00\x00\x01\x00') }]), }; diff --git a/src/types/indexedFiles.ts b/src/types/indexedFiles.ts index 5eaddc16b..562751af1 100644 --- a/src/types/indexedFiles.ts +++ b/src/types/indexedFiles.ts @@ -53,52 +53,72 @@ export default class IndexedFiles { // Build the maps files.forEach((file) => { const crc32WithSize = `${file.getCrc32()}|${file.getSize()}`; - crc32RawMap.set(crc32WithSize, [file, ...(crc32RawMap.get(crc32WithSize) ?? [])]); + if (!crc32RawMap.has(crc32WithSize)) { + crc32RawMap.set(crc32WithSize, [file]); + } else { + crc32RawMap.get(crc32WithSize)?.unshift(file); + } const md5 = file.getMd5(); if (md5) { - md5RawMap.set(md5, [file, ...(md5RawMap.get(md5) ?? 
[])]); + if (!md5RawMap.has(md5)) { + md5RawMap.set(md5, [file]); + } else { + md5RawMap.get(md5)?.unshift(file); + } } const sha1 = file.getSha1(); if (sha1) { - sha1RawMap.set(sha1, [file, ...(sha1RawMap.get(sha1) ?? [])]); + if (!sha1RawMap.has(sha1)) { + sha1RawMap.set(sha1, [file]); + } else { + sha1RawMap.get(sha1)?.unshift(file); + } } const sha256 = file.getSha256(); if (sha256) { - sha256RawMap.set(sha256, [file, ...(sha256RawMap.get(sha256) ?? [])]); + if (!sha256RawMap.has(sha256)) { + sha256RawMap.set(sha256, [file]); + } else { + sha256RawMap.get(sha256)?.unshift(file); + } } if (file.getFileHeader()) { const crc32WithoutHeader = `${file.getCrc32WithoutHeader()}|${file.getSizeWithoutHeader()}`; - crc32WithoutHeaderMap.set( - crc32WithoutHeader, - [...(crc32WithoutHeaderMap.get(crc32WithoutHeader) ?? []), file], - ); + if (!crc32WithoutHeaderMap.has(crc32WithoutHeader)) { + crc32WithoutHeaderMap.set(crc32WithoutHeader, [file]); + } else { + crc32WithoutHeaderMap.get(crc32WithoutHeader)?.push(file); + } const md5WithoutHeader = file.getMd5WithoutHeader(); if (md5WithoutHeader) { - md5WithoutHeaderMap.set( - md5WithoutHeader, - [...(md5WithoutHeaderMap.get(md5WithoutHeader) ?? []), file], - ); + if (!md5WithoutHeaderMap.has(md5WithoutHeader)) { + md5WithoutHeaderMap.set(md5WithoutHeader, [file]); + } else { + md5WithoutHeaderMap.get(md5WithoutHeader)?.push(file); + } } const sha1WithoutHeader = file.getSha1WithoutHeader(); if (sha1WithoutHeader) { - sha1WithoutHeaderMap.set( - sha1WithoutHeader, - [...(sha1WithoutHeaderMap.get(sha1WithoutHeader) ?? []), file], - ); + if (!sha1WithoutHeaderMap.has(sha1WithoutHeader)) { + sha1WithoutHeaderMap.set(sha1WithoutHeader, [file]); + } else { + sha1WithoutHeaderMap.get(sha1WithoutHeader)?.push(file); + } } const sha256WithoutHeader = file.getSha256WithoutHeader(); if (sha256WithoutHeader) { - sha256WithoutHeaderMap.set( - sha256WithoutHeader, - [...(sha256WithoutHeaderMap.get(sha256WithoutHeader) ?? 
[]), file], - ); + if (!sha256WithoutHeaderMap.has(sha256WithoutHeader)) { + sha256WithoutHeaderMap.set(sha256WithoutHeader, [file]); + } else { + sha256WithoutHeaderMap.get(sha256WithoutHeader)?.push(file); + } } } }); @@ -136,6 +156,19 @@ export default class IndexedFiles { .filter(ArrayPoly.filterUniqueMapped((file) => file.toString())); } + @Memoize() + getFilesByFilePath(): Map { + return this.getFiles().reduce((map, file) => { + const key = file.getFilePath(); + if (!map.has(key)) { + map.set(key, [file]); + } else { + map.get(key)?.push(file); + } + return map; + }, new Map()); + } + getSize(): number { return this.getFiles().length; } @@ -144,22 +177,22 @@ export default class IndexedFiles { * Find file(s) in the index based some search criteria. */ findFiles(file: File | ROM): File[] | undefined { - const { sha256 } = file; + const sha256 = file.sha256?.replace(/[^0-9a-f]/ig, ''); if (sha256 && this.sha256.has(sha256)) { return this.sha256.get(sha256); } - const { sha1 } = file; + const sha1 = file.sha1?.replace(/[^0-9a-f]/ig, ''); if (sha1 && this.sha1.has(sha1)) { return this.sha1.get(sha1); } - const { md5 } = file; + const md5 = file.md5?.replace(/[^0-9a-f]/ig, ''); if (md5 && this.md5.has(md5)) { return this.md5.get(md5); } - const { crc32 } = file; + const crc32 = file.crc32?.replace(/[^0-9a-f]/ig, ''); if (crc32) { const crc32WithSize = `${crc32}|${file.getSize()}`; if (this.crc32.has(crc32WithSize)) { diff --git a/src/types/options.ts b/src/types/options.ts index 797ff4d47..e85efa9cc 100644 --- a/src/types/options.ts +++ b/src/types/options.ts @@ -4,9 +4,7 @@ import os from 'node:os'; import path from 'node:path'; import async, { AsyncResultCallback } from 'async'; -import { - Expose, instanceToPlain, plainToInstance, Transform, -} from 'class-transformer'; +import { Expose, instanceToPlain, plainToInstance } from 'class-transformer'; import fg from 'fast-glob'; import { isNotJunk } from 'junk'; import micromatch from 'micromatch'; @@ -18,7 +16,8 
@@ import Temp from '../globals/temp.js'; import ArrayPoly from '../polyfill/arrayPoly.js'; import fsPoly, { FsWalkCallback } from '../polyfill/fsPoly.js'; import URLPoly from '../polyfill/urlPoly.js'; -import DAT from './dats/dat.js'; +import Disk from './dats/disk.js'; +import ROM from './dats/rom.js'; import ExpectedError from './expectedError.js'; import File from './files/file.js'; import { ChecksumBitmask } from './files/fileChecksums.js'; @@ -58,20 +57,24 @@ export enum FixExtension { ALWAYS = 3, } +export enum PreferRevision { + OLDER = 1, + NEWER = 2, +} + export interface OptionsProps { readonly commands?: string[], - readonly fixdat?: boolean; readonly input?: string[], readonly inputExclude?: string[], - readonly inputMinChecksum?: string, + readonly inputChecksumQuick?: boolean, + readonly inputChecksumMin?: string, + readonly inputChecksumMax?: string, readonly inputChecksumArchives?: string, readonly dat?: string[], readonly datExclude?: string[], - readonly datRegex?: string, readonly datNameRegex?: string, - readonly datRegexExclude?: string, readonly datNameRegexExclude?: string, readonly datDescriptionRegex?: string, readonly datDescriptionRegexExclude?: string, @@ -108,14 +111,14 @@ export interface OptionsProps { readonly removeHeaders?: string[], readonly mergeRoms?: string, + readonly excludeDisks?: boolean, + readonly allowExcessSets?: boolean, readonly allowIncompleteSets?: boolean, readonly filterRegex?: string, readonly filterRegexExclude?: string, readonly filterLanguage?: string[], - readonly languageFilter?: string[], readonly filterRegion?: string[], - readonly regionFilter?: string[], readonly noBios?: boolean, readonly onlyBios?: boolean, readonly noDevice?: boolean, @@ -151,11 +154,8 @@ export interface OptionsProps { readonly preferGood?: boolean, readonly preferLanguage?: string[], readonly preferRegion?: string[], - readonly preferRevisionNewer?: boolean, - readonly preferRevisionOlder?: boolean, + readonly preferRevision?: 
string, readonly preferRetail?: boolean, - readonly preferNTSC?: boolean, - readonly preferPAL?: boolean, readonly preferParent?: boolean, readonly reportOutput?: string, @@ -178,13 +178,15 @@ export default class Options implements OptionsProps { @Expose({ name: '_' }) readonly commands: string[]; - readonly fixdat: boolean; - readonly input: string[]; readonly inputExclude: string[]; - readonly inputMinChecksum?: string; + readonly inputChecksumQuick: boolean; + + readonly inputChecksumMin?: string; + + readonly inputChecksumMax?: string; readonly inputChecksumArchives?: string; @@ -192,12 +194,8 @@ export default class Options implements OptionsProps { readonly datExclude: string[]; - readonly datRegex: string; - readonly datNameRegex: string; - readonly datRegexExclude: string; - readonly datNameRegexExclude: string; readonly datDescriptionRegex: string; @@ -256,6 +254,10 @@ export default class Options implements OptionsProps { readonly mergeRoms?: string; + readonly excludeDisks: boolean; + + readonly allowExcessSets: boolean; + readonly allowIncompleteSets: boolean; readonly filterRegex: string; @@ -264,12 +266,8 @@ export default class Options implements OptionsProps { readonly filterLanguage: string[]; - readonly languageFilter: string[]; - readonly filterRegion: string[]; - readonly regionFilter: string[]; - readonly noBios: boolean; readonly onlyBios: boolean; @@ -338,20 +336,10 @@ export default class Options implements OptionsProps { readonly preferRegion: string[]; - readonly preferRevisionNewer: boolean; - - readonly preferRevisionOlder: boolean; + readonly preferRevision?: string; readonly preferRetail: boolean; - @Expose({ name: 'preferNtsc' }) - @Transform(({ value }) => !!value) - readonly preferNTSC: boolean; - - @Expose({ name: 'preferPal' }) - @Transform(({ value }) => !!value) - readonly preferPAL: boolean; - readonly preferParent: boolean; readonly reportOutput: string; @@ -376,18 +364,17 @@ export default class Options implements 
OptionsProps { constructor(options?: OptionsProps) { this.commands = options?.commands ?? []; - this.fixdat = options?.fixdat ?? false; this.input = options?.input ?? []; this.inputExclude = options?.inputExclude ?? []; - this.inputMinChecksum = options?.inputMinChecksum; + this.inputChecksumQuick = options?.inputChecksumQuick ?? false; + this.inputChecksumMin = options?.inputChecksumMin; + this.inputChecksumMax = options?.inputChecksumMax; this.inputChecksumArchives = options?.inputChecksumArchives; this.dat = options?.dat ?? []; this.datExclude = options?.datExclude ?? []; - this.datRegex = options?.datRegex ?? ''; this.datNameRegex = options?.datNameRegex ?? ''; - this.datRegexExclude = options?.datRegexExclude ?? ''; this.datNameRegexExclude = options?.datNameRegexExclude ?? ''; this.datDescriptionRegex = options?.datDescriptionRegex ?? ''; this.datDescriptionRegexExclude = options?.datDescriptionRegexExclude ?? ''; @@ -406,9 +393,11 @@ export default class Options implements OptionsProps { this.dirLetterLimit = options?.dirLetterLimit ?? 0; this.dirLetterGroup = options?.dirLetterGroup ?? false; this.dirGameSubdir = options?.dirGameSubdir; + this.fixExtension = options?.fixExtension; this.overwrite = options?.overwrite ?? false; this.overwriteInvalid = options?.overwriteInvalid ?? false; + this.cleanExclude = options?.cleanExclude ?? []; this.cleanBackup = options?.cleanBackup; this.cleanDryRun = options?.cleanDryRun ?? false; @@ -423,14 +412,14 @@ export default class Options implements OptionsProps { this.removeHeaders = options?.removeHeaders; this.mergeRoms = options?.mergeRoms; + this.excludeDisks = options?.excludeDisks ?? false; + this.allowExcessSets = options?.allowExcessSets ?? false; this.allowIncompleteSets = options?.allowIncompleteSets ?? false; this.filterRegex = options?.filterRegex ?? ''; this.filterRegexExclude = options?.filterRegexExclude ?? ''; this.filterLanguage = options?.filterLanguage ?? 
[]; - this.languageFilter = options?.languageFilter ?? []; this.filterRegion = options?.filterRegion ?? []; - this.regionFilter = options?.regionFilter ?? []; this.noBios = options?.noBios ?? false; this.onlyBios = options?.onlyBios ?? false; this.noDevice = options?.noDevice ?? false; @@ -466,11 +455,8 @@ export default class Options implements OptionsProps { this.preferGood = options?.preferGood ?? false; this.preferLanguage = options?.preferLanguage ?? []; this.preferRegion = options?.preferRegion ?? []; - this.preferRevisionNewer = options?.preferRevisionNewer ?? false; - this.preferRevisionOlder = options?.preferRevisionOlder ?? false; + this.preferRevision = options?.preferRevision; this.preferRetail = options?.preferRetail ?? false; - this.preferNTSC = options?.preferNTSC ?? false; - this.preferPAL = options?.preferPAL ?? false; this.preferParent = options?.preferParent ?? false; this.reportOutput = options?.reportOutput ?? ''; @@ -538,7 +524,7 @@ export default class Options implements OptionsProps { * The writing command that was specified. */ writeString(): string | undefined { - return ['copy', 'move', 'link', 'symlink'].find((command) => this.getCommands().has(command)); + return ['copy', 'move', 'link'].find((command) => this.getCommands().has(command)); } /** @@ -569,6 +555,16 @@ export default class Options implements OptionsProps { return this.getCommands().has('extract'); } + /** + * Should a given ROM be extracted? + */ + shouldExtractRom(rom: ROM): boolean { + if (rom instanceof Disk) { + return false; + } + return this.shouldExtract(); + } + /** * Was the `zip` command provided? */ @@ -579,10 +575,14 @@ export default class Options implements OptionsProps { /** * Should a given output file path be zipped? 
*/ - shouldZipFile(filePath: string): boolean { + shouldZipRom(rom: ROM): boolean { + if (rom instanceof Disk) { + return false; + } + return this.shouldZip() && (!this.getZipExclude() || !micromatch.isMatch( - filePath.replace(/^.[\\/]/, ''), + rom.getName().replace(/^.[\\/]/, ''), this.getZipExclude(), )); } @@ -598,7 +598,7 @@ export default class Options implements OptionsProps { * Was the 'fixdat' command provided? */ shouldFixdat(): boolean { - return this.getCommands().has('fixdat') || this.fixdat; + return this.getCommands().has('fixdat'); } /** @@ -765,9 +765,22 @@ export default class Options implements OptionsProps { return globPattern; } - getInputMinChecksum(): ChecksumBitmask | undefined { + getInputChecksumQuick(): boolean { + return this.inputChecksumQuick; + } + + getInputChecksumMin(): ChecksumBitmask | undefined { const checksumBitmask = Object.keys(ChecksumBitmask) - .find((bitmask) => bitmask.toUpperCase() === this.inputMinChecksum?.toUpperCase()); + .find((bitmask) => bitmask.toUpperCase() === this.inputChecksumMin?.toUpperCase()); + if (!checksumBitmask) { + return undefined; + } + return ChecksumBitmask[checksumBitmask as keyof typeof ChecksumBitmask]; + } + + getInputChecksumMax(): ChecksumBitmask | undefined { + const checksumBitmask = Object.keys(ChecksumBitmask) + .find((bitmask) => bitmask.toUpperCase() === this.inputChecksumMax?.toUpperCase()); if (!checksumBitmask) { return undefined; } @@ -809,11 +822,11 @@ export default class Options implements OptionsProps { } getDatNameRegex(): RegExp[] | undefined { - return Options.getRegex(this.datNameRegex || this.datRegex); + return Options.getRegex(this.datNameRegex); } getDatNameRegexExclude(): RegExp[] | undefined { - return Options.getRegex(this.datNameRegexExclude || this.datRegexExclude); + return Options.getRegex(this.datNameRegexExclude); } getDatDescriptionRegex(): RegExp[] | undefined { @@ -993,17 +1006,7 @@ export default class Options implements OptionsProps { /** * Can the 
{@link Header} be removed for a {@link extension} during writing? */ - canRemoveHeader(dat: DAT, extension: string): boolean { - // ROMs in "headered" DATs shouldn't have their header removed - if (dat.isHeadered()) { - return false; - } - - // ROMs in "headerless" DATs should have their header removed - if (dat.isHeaderless()) { - return true; - } - + canRemoveHeader(extension: string): boolean { if (this.removeHeaders === undefined) { // Option wasn't provided, we shouldn't remove headers return false; @@ -1026,9 +1029,16 @@ export default class Options implements OptionsProps { return MergeMode[mergeMode as keyof typeof MergeMode]; } + getExcludeDisks(): boolean { + return this.excludeDisks; + } + + getAllowExcessSets(): boolean { + return this.allowExcessSets; + } + getAllowIncompleteSets(): boolean { - // If we're only reading, then go ahead and report on incomplete sets - return this.allowIncompleteSets || !this.shouldWrite(); + return this.allowIncompleteSets; } getFilterRegex(): RegExp[] | undefined { @@ -1043,9 +1053,6 @@ export default class Options implements OptionsProps { if (this.filterLanguage.length > 0) { return new Set(Options.filterUniqueUpper(this.filterLanguage)); } - if (this.languageFilter.length > 0) { - return new Set(Options.filterUniqueUpper(this.languageFilter)); - } return new Set(); } @@ -1053,9 +1060,6 @@ export default class Options implements OptionsProps { if (this.filterRegion.length > 0) { return new Set(Options.filterUniqueUpper(this.filterRegion)); } - if (this.regionFilter.length > 0) { - return new Set(Options.filterUniqueUpper(this.regionFilter)); - } return new Set(); } @@ -1195,26 +1199,19 @@ export default class Options implements OptionsProps { return Options.filterUniqueUpper(this.preferRegion); } - getPreferRevisionNewer(): boolean { - return this.preferRevisionNewer; - } - - getPreferRevisionOlder(): boolean { - return this.preferRevisionOlder; + getPreferRevision(): PreferRevision | undefined { + const preferRevision 
= Object.keys(PreferRevision) + .find((mode) => mode.toLowerCase() === this.preferRevision?.toLowerCase()); + if (!preferRevision) { + return undefined; + } + return PreferRevision[preferRevision as keyof typeof PreferRevision]; } getPreferRetail(): boolean { return this.preferRetail; } - getPreferNTSC(): boolean { - return this.preferNTSC; - } - - getPreferPAL(): boolean { - return this.preferPAL; - } - getPreferParent(): boolean { return this.preferParent; } diff --git a/src/types/outputFactory.ts b/src/types/outputFactory.ts index f4b6584a0..6a6b59a27 100644 --- a/src/types/outputFactory.ts +++ b/src/types/outputFactory.ts @@ -4,6 +4,7 @@ import path, { ParsedPath } from 'node:path'; import ArrayPoly from '../polyfill/arrayPoly.js'; import fsPoly from '../polyfill/fsPoly.js'; import DAT from './dats/dat.js'; +import Disk from './dats/disk.js'; import Game from './dats/game.js'; import Release from './dats/release.js'; import ROM from './dats/rom.js'; @@ -109,7 +110,7 @@ export default class OutputFactory { * ************************* */ - public static getDir( + static getDir( options: Options, dat: DAT, game?: Game, @@ -132,11 +133,14 @@ export default class OutputFactory { )); if (options.getDirMirror() && inputFile?.getFilePath()) { - const mirroredDir = path.dirname(inputFile.getFilePath()) - .split(/[\\/]/) - .splice(1) - .join(path.sep); - output = path.join(output, mirroredDir); + const mirroredFilePath = options.getInputPaths() + .map((inputPath) => path.resolve(inputPath)) + .reduce((inputFilePath, inputPath) => { + const inputPathRegex = new RegExp(`^${inputPath.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}[\\/]?`); + return inputFilePath.replace(inputPathRegex, ''); + }, path.resolve(inputFile.getFilePath())); + const mirroredDirPath = path.dirname(mirroredFilePath); + output = path.join(output, mirroredDirPath); } if (options.getDirDatName() && dat.getNameShort()) { @@ -169,7 +173,7 @@ export default class OutputFactory { result = 
this.replaceGameTokens(result, game); result = this.replaceDatTokens(result, dat); result = this.replaceInputTokens(result, inputRomPath); - result = this.replaceOutputTokens(result, options, outputRomFilename); + result = this.replaceOutputTokens(result, options, game, outputRomFilename); result = this.replaceOutputGameConsoleTokens(result, dat, outputRomFilename); const leftoverTokens = result.match(/\{[a-zA-Z]+\}/g); @@ -186,17 +190,11 @@ export default class OutputFactory { } let output = input; - output = output - .replace('{region}', release.getRegion()) - .replace('{gameRegion}', release.getRegion()) // deprecated - .replace('{datReleaseRegion}', release.getRegion()); // deprecated + output = output.replace('{region}', release.getRegion()); const releaseLanguage = release.getLanguage(); if (releaseLanguage) { - output = output - .replace('{language}', releaseLanguage) - .replace('{gameLanguage}', releaseLanguage) // deprecated - .replace('{datReleaseLanguage}', releaseLanguage); // deprecated + output = output.replace('{language}', releaseLanguage); } return output; @@ -210,20 +208,15 @@ export default class OutputFactory { const gameRegion = game.getRegions().find(() => true); if (gameRegion) { - // TODO(cemmer): drop the game* prefixed tokens - output = output - .replace('{region}', gameRegion) - .replace('{gameRegion}', gameRegion); + output = output.replace('{region}', gameRegion); } const gameLanguage = game.getLanguages().find(() => true); if (gameLanguage) { - output = output - .replace('{gameLanguage}', gameLanguage) - .replace('{language}', gameLanguage); + output = output.replace('{language}', gameLanguage); } - output = output.replace('{gameType}', game.getGameType()); + output = output.replace('{type}', game.getGameType()); const gameGenre = game.getGenre(); if (gameGenre) { @@ -256,6 +249,7 @@ export default class OutputFactory { private static replaceOutputTokens( input: string, options: Options, + game?: Game, outputRomFilename?: string, ): 
string { if (!outputRomFilename && options.getFixExtension() === FixExtension.NEVER) { @@ -268,7 +262,7 @@ export default class OutputFactory { return input .replace('{outputBasename}', outputRom.base) .replace('{outputName}', outputRom.name) - .replace('{outputExt}', outputRom.ext.replace(/^\./, '')); + .replace('{outputExt}', outputRom.ext.replace(/^\./, '') || '-'); } private static replaceOutputGameConsoleTokens( @@ -395,7 +389,11 @@ export default class OutputFactory { const subPathsToFilenames = [...filenames] .reduce((subPathMap, filename) => { const subPath = filename.replace(/[\\/].+$/, ''); - subPathMap.set(subPath, [...subPathMap.get(subPath) ?? [], filename]); + if (!subPathMap.has(subPath)) { + subPathMap.set(subPath, [filename]); + } else { + subPathMap.get(subPath)?.push(filename); + } return subPathMap; }, new Map()); const tuples = [...subPathsToFilenames.entries()] @@ -430,7 +428,11 @@ export default class OutputFactory { const subPathsToFilenames = [...filenames] .reduce((subPathMap, filename) => { const subPath = filename.replace(/[\\/].+$/, ''); - subPathMap.set(subPath, [...subPathMap.get(subPath) ?? 
[], filename]); + if (!subPathMap.has(subPath)) { + subPathMap.set(subPath, [filename]); + } else { + subPathMap.get(subPath)?.push(filename); + } return subPathMap; }, new Map()); @@ -488,8 +490,10 @@ export default class OutputFactory { if ((options.getDirGameSubdir() === GameSubdirMode.MULTIPLE && game.getRoms().length > 1 // Output file is an archive - && !(FileFactory.isExtensionArchive(ext) || inputFile instanceof ArchiveFile)) + && !FileFactory.isExtensionArchive(ext) + && !(inputFile instanceof ArchiveFile)) || options.getDirGameSubdir() === GameSubdirMode.ALWAYS + || rom instanceof Disk ) { output = path.join(game.getName(), output); } @@ -509,7 +513,7 @@ export default class OutputFactory { inputFile: File, ): string { // Determine the output path of the file - if (options.shouldZipFile(rom.getName())) { + if (options.shouldZipRom(rom)) { // Should zip, generate the zip name from the game name return `${game.getName()}.zip`; } @@ -518,6 +522,7 @@ export default class OutputFactory { if (!(inputFile instanceof ArchiveEntry || inputFile instanceof ArchiveFile) || options.shouldExtract() + || rom instanceof Disk ) { // Should extract (if needed), generate the file name from the ROM name return romBasename; @@ -541,7 +546,7 @@ export default class OutputFactory { inputFile: File, ): string { const romBasename = this.getRomBasename(rom, inputFile); - if (!options.shouldZipFile(rom.getName())) { + if (!options.shouldZipRom(rom)) { return romBasename; } diff --git a/src/types/releaseCandidate.ts b/src/types/releaseCandidate.ts index 4fc9e7267..bac221c18 100644 --- a/src/types/releaseCandidate.ts +++ b/src/types/releaseCandidate.ts @@ -68,4 +68,17 @@ export default class ReleaseCandidate { isPatched(): boolean { return this.getRomsWithFiles().some((romWithFiles) => romWithFiles.getInputFile().getPatch()); } + + // Immutable setters + + withRomsWithFiles(romsWithFiles: ROMWithFiles[]): ReleaseCandidate { + if (romsWithFiles === this.romsWithFiles + || 
(romsWithFiles.length === this.romsWithFiles.length + && romsWithFiles.every((rwf, idx) => this.romsWithFiles[idx] === rwf) + ) + ) { + return this; + } + return new ReleaseCandidate(this.game, this.release, romsWithFiles); + } } diff --git a/src/types/romWithFiles.ts b/src/types/romWithFiles.ts index 16f40449b..d10eee8fd 100644 --- a/src/types/romWithFiles.ts +++ b/src/types/romWithFiles.ts @@ -29,4 +29,27 @@ export default class ROMWithFiles { getOutputFile(): File { return this.outputFile; } + + // Immutable setters + + withRom(rom: ROM): ROMWithFiles { + if (rom === this.rom) { + return this; + } + return new ROMWithFiles(rom, this.inputFile, this.outputFile); + } + + withInputFile(inputFile: File): ROMWithFiles { + if (inputFile === this.inputFile) { + return this; + } + return new ROMWithFiles(this.rom, inputFile, this.outputFile); + } + + withOutputFile(outputFile: File): ROMWithFiles { + if (outputFile === this.outputFile) { + return this; + } + return new ROMWithFiles(this.rom, this.inputFile, outputFile); + } } diff --git a/static/favicon.svg b/static/favicon.svg new file mode 100644 index 000000000..6a9b5bc44 --- /dev/null +++ b/static/favicon.svg @@ -0,0 +1,44 @@ + + + + diff --git a/static/logo-dark.svg b/static/logo-dark.svg new file mode 100644 index 000000000..62cd8658c --- /dev/null +++ b/static/logo-dark.svg @@ -0,0 +1,44 @@ + + + + diff --git a/static/logo-light.svg b/static/logo-light.svg new file mode 100644 index 000000000..668f93763 --- /dev/null +++ b/static/logo-light.svg @@ -0,0 +1,44 @@ + + + + diff --git a/static/netwaretcs-715.svg b/static/netwaretcs-715.svg new file mode 100644 index 000000000..8f5384c14 --- /dev/null +++ b/static/netwaretcs-715.svg @@ -0,0 +1,43 @@ + + + + diff --git a/test/console/progressBarCli.test.ts b/test/console/progressBarCli.test.ts index 2718f43c7..56ace4d35 100644 --- a/test/console/progressBarCli.test.ts +++ b/test/console/progressBarCli.test.ts @@ -7,274 +7,274 @@ import SingleBarFormatted from 
'../../src/console/singleBarFormatted.js'; import ProgressBarCLISpy from './progressBarCliSpy.js'; describe('reset', () => { - it('should change the value and total', async () => { + it('should change the value and total', () => { const spy = new ProgressBarCLISpy(); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE), 100); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE), 100); - await progressBar.incrementDone(); - await progressBar.render(true); + progressBar.incrementDone(); + progressBar.render(true); expect(spy.getLastLine()).toMatch(new RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name .* 1/100`)); - await progressBar.reset(20); + progressBar.reset(20); expect(spy.getLastLine()).toMatch(new RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name .* 0/20`)); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); }); describe('setSymbol', () => { - it('should change the symbol to empty', async () => { + it('should change the symbol to empty', () => { const spy = new ProgressBarCLISpy(); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); - await progressBar.setSymbol(''); + progressBar.setSymbol(''); expect(spy.getLastLine()).toMatch(/^name/); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); test.each( Object.keys(ProgressBarSymbol), - )('should change the symbol to non-empty; %s', async (symbol) => { + )('should change the symbol to non-empty; %s', (symbol) => { const spy = new ProgressBarCLISpy(); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', 'DEFAULT'); - await progressBar.setSymbol(symbol); + progressBar.setSymbol(symbol); expect(spy.getLastLine()).toMatch(new 
RegExp(`^${symbol} +name`)); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); }); describe('incrementProgress', () => { - it('should increment once each time', async () => { + it('should increment once each time', () => { const spy = new ProgressBarCLISpy(); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE), 100); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE), 100); expect(spy.getLastLine()).toMatch(new RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name .* \\| ${SingleBarFormatted.BAR_INCOMPLETE_CHAR}+ \\| 0/100`)); - await progressBar.incrementProgress(); - await progressBar.render(true); + progressBar.incrementProgress(); + progressBar.render(true); expect(spy.getLastLine()).toMatch(new RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name .* \\| ${SingleBarFormatted.BAR_IN_PROGRESS_CHAR}.* \\| 0/100`)); - await progressBar.incrementProgress(); - await progressBar.render(true); + progressBar.incrementProgress(); + progressBar.render(true); expect(spy.getLastLine()).toMatch(new RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name .* \\| ${SingleBarFormatted.BAR_IN_PROGRESS_CHAR}.* \\| 0/100`)); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); - it('should work with incrementDone', async () => { + it('should work with incrementDone', () => { const spy = new ProgressBarCLISpy(); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE), 100); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE), 100); expect(spy.getLastLine()).toMatch(new RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name .* \\| ${SingleBarFormatted.BAR_INCOMPLETE_CHAR}+ \\| 0/100`)); - await progressBar.incrementProgress(); - await progressBar.render(true); + progressBar.incrementProgress(); + progressBar.render(true); expect(spy.getLastLine()).toMatch(new 
RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name .* \\| ${SingleBarFormatted.BAR_IN_PROGRESS_CHAR}.* \\| 0/100`)); - await progressBar.incrementDone(); - await progressBar.render(true); + progressBar.incrementDone(); + progressBar.render(true); expect(spy.getLastLine()).toMatch(new RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name .* \\| [^${SingleBarFormatted.BAR_IN_PROGRESS_CHAR}].* \\| 1/100`)); - await progressBar.incrementProgress(); - await progressBar.render(true); + progressBar.incrementProgress(); + progressBar.render(true); expect(spy.getLastLine()).toMatch(new RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name .* \\| ${SingleBarFormatted.BAR_IN_PROGRESS_CHAR}.* \\| 1/100`)); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); }); describe('incrementDone', () => { - it('should increment once each time', async () => { + it('should increment once each time', () => { const spy = new ProgressBarCLISpy(); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE), 100); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE), 100); - await progressBar.incrementDone(); - await progressBar.render(true); + progressBar.incrementDone(); + progressBar.render(true); expect(spy.getLastLine()).toMatch(new RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name .* 1/100`)); - await progressBar.incrementDone(); - await progressBar.render(true); + progressBar.incrementDone(); + progressBar.render(true); expect(spy.getLastLine()).toMatch(new RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name .* 2/100`)); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); }); describe('update', () => { - it('should update the value each time', async () => { + it('should update the value each time', () => { const spy = new ProgressBarCLISpy(); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE), 100); + const progressBar = 
ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE), 100); expect(spy.getLineCount()).toEqual(1); - await progressBar.update(8); - await progressBar.render(true); + progressBar.update(8); + progressBar.render(true); expect(spy.getLastLine()).toMatch(new RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name .* 8/100`)); - await progressBar.update(32); - await progressBar.render(true); + progressBar.update(32); + progressBar.render(true); expect(spy.getLastLine()).toMatch(new RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name .* 32/100`)); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); }); describe('done', () => { - it('should update the symbol', async () => { + it('should update the symbol', () => { const spy = new ProgressBarCLISpy(); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.WAITING)); expect(spy.getLineCount()).toEqual(1); - await progressBar.done(); + progressBar.done(); expect(spy.getLineCount()).toEqual(3); expect(spy.getLastLine()).toMatch(new RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name`)); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); - it('should update the symbol and message', async () => { + it('should update the symbol and message', () => { const spy = new ProgressBarCLISpy(); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.WAITING)); expect(spy.getLineCount()).toEqual(1); - await progressBar.done('done message'); + progressBar.done('done message'); expect(spy.getLineCount()).toEqual(3); expect(spy.getLastLine()).toMatch(new RegExp(`${stripAnsi(ProgressBarSymbol.DONE)} +name .* done message$`)); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); }); describe('logDebug', () => { - 
it('should log at the matching log level', async () => { + it('should log at the matching log level', () => { const spy = new ProgressBarCLISpy(LogLevel.DEBUG); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); expect(spy.getLineCount()).toEqual(1); progressBar.logDebug('debug message'); - await progressBar.render(true); + progressBar.render(true); expect(spy.getLineCount()).toEqual(3); expect(spy.getLogLine()).toMatch(/DEBUG:.*debug message/); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); - it('should not log at the matching log level', async () => { + it('should not log at the matching log level', () => { const spy = new ProgressBarCLISpy(LogLevel.DEBUG + 1); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); expect(spy.getLineCount()).toEqual(1); progressBar.logDebug('debug message'); - await progressBar.render(true); + progressBar.render(true); expect(spy.getLogLine()).toBeUndefined(); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); }); describe('logInfo', () => { - it('should log at the matching log level', async () => { + it('should log at the matching log level', () => { const spy = new ProgressBarCLISpy(LogLevel.INFO); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); expect(spy.getLineCount()).toEqual(1); progressBar.logInfo('info message'); - await progressBar.render(true); + progressBar.render(true); expect(spy.getLineCount()).toEqual(3); expect(spy.getLogLine()).toMatch(/INFO:.*info message/); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); - 
it('should not log at the matching log level', async () => { + it('should not log at the matching log level', () => { const spy = new ProgressBarCLISpy(LogLevel.INFO + 1); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); expect(spy.getLineCount()).toEqual(1); progressBar.logInfo('info message'); - await progressBar.render(true); + progressBar.render(true); expect(spy.getLogLine()).toBeUndefined(); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); }); describe('logWarn', () => { - it('should log at the matching log level', async () => { + it('should log at the matching log level', () => { const spy = new ProgressBarCLISpy(LogLevel.WARN); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); expect(spy.getLineCount()).toEqual(1); progressBar.logWarn('warn message'); - await progressBar.render(true); + progressBar.render(true); expect(spy.getLineCount()).toEqual(3); expect(spy.getLogLine()).toMatch(/WARN:.*warn message/); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); - it('should not log at the matching log level', async () => { + it('should not log at the matching log level', () => { const spy = new ProgressBarCLISpy(LogLevel.WARN + 1); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); expect(spy.getLineCount()).toEqual(1); progressBar.logWarn('warn message'); - await progressBar.render(true); + progressBar.render(true); expect(spy.getLogLine()).toBeUndefined(); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); }); describe('logError', () => { - it('should log at 
the matching log level', async () => { + it('should log at the matching log level', () => { const spy = new ProgressBarCLISpy(LogLevel.ERROR); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); expect(spy.getLineCount()).toEqual(1); progressBar.logError('error message'); - await progressBar.render(true); + progressBar.render(true); expect(spy.getLineCount()).toEqual(3); expect(spy.getLogLine()).toMatch(/ERROR:.*error message/); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); - it('should not log at the matching log level', async () => { + it('should not log at the matching log level', () => { const spy = new ProgressBarCLISpy(LogLevel.ERROR + 1); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); + const progressBar = ProgressBarCLI.new(spy.getLogger(), 'name', stripAnsi(ProgressBarSymbol.DONE)); expect(spy.getLineCount()).toEqual(1); progressBar.logError('error message'); - await progressBar.render(true); + progressBar.render(true); expect(spy.getLogLine()).toBeUndefined(); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); }); describe('freeze', () => { - it('should freeze the single bar', async () => { + it('should freeze the single bar', () => { const spy = new ProgressBarCLISpy(); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), '', stripAnsi(ProgressBarSymbol.DONE)); + const progressBar = ProgressBarCLI.new(spy.getLogger(), '', stripAnsi(ProgressBarSymbol.DONE)); expect(spy.getLineCount()).toEqual(1); - await progressBar.freeze(); + progressBar.freeze(); expect(spy.getLineCount()).toEqual(3); // one final render, and then a log of the render - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); }); describe('delete', () => { - it('should delete the single bar', async () => { + it('should delete the single 
bar', () => { const spy = new ProgressBarCLISpy(); - const progressBar = await ProgressBarCLI.new(spy.getLogger(), '', stripAnsi(ProgressBarSymbol.DONE)); + const progressBar = ProgressBarCLI.new(spy.getLogger(), '', stripAnsi(ProgressBarSymbol.DONE)); expect(spy.getLineCount()).toEqual(1); progressBar.delete(); expect(spy.getLineCount()).toEqual(1); - await ProgressBarCLI.stop(); + ProgressBarCLI.stop(); }); }); diff --git a/test/console/progressBarFake.ts b/test/console/progressBarFake.ts index 94f5b33b1..eb5fdbe6c 100644 --- a/test/console/progressBarFake.ts +++ b/test/console/progressBarFake.ts @@ -5,51 +5,31 @@ import ProgressBar from '../../src/console/progressBar.js'; export default class ProgressBarFake extends ProgressBar { delete(): void {} - async done(): Promise { - return Promise.resolve(); - } + done(): void {} addWaitingMessage(): void {} - async removeWaitingMessage(): Promise { - return Promise.resolve(); - } + removeWaitingMessage(): void {} - async incrementTotal(): Promise { - return Promise.resolve(); - } + incrementTotal(): void {} - async incrementProgress(): Promise { - return Promise.resolve(); - } + incrementProgress(): void {} - async incrementDone(): Promise { - return Promise.resolve(); - } + incrementDone(): void {} - withLoggerPrefix(): ProgressBar { + setLoggerPrefix(): ProgressBar { return this; } log(): void {} - async reset(): Promise { - return Promise.resolve(); - } + reset(): void {} - async setName(): Promise { - return Promise.resolve(); - } + setName(): void {} - async setSymbol(): Promise { - return Promise.resolve(); - } + setSymbol(): void {} - async freeze(): Promise { - return Promise.resolve(); - } + freeze(): void {} - async update(): Promise { - return Promise.resolve(); - } + update(): void {} } diff --git a/test/console/singleBarFormatted.test.ts b/test/console/singleBarFormatted.test.ts index 560d5d638..49c4ff626 100644 --- a/test/console/singleBarFormatted.test.ts +++ b/test/console/singleBarFormatted.test.ts @@ 
-18,9 +18,11 @@ function testSingleBarFormatted( }); const singleBarFormatted = new SingleBarFormatted(multiBar, initialTotal, initialPayload); - callback(singleBarFormatted); - - multiBar.stop(); + try { + callback(singleBarFormatted); + } finally { + multiBar.stop(); + } } describe('getSingleBar', () => { @@ -42,20 +44,20 @@ describe('getLastOutput', () => { testSingleBarFormatted(100, {}, (singleBarFormatted) => { singleBarFormatted.getSingleBar().render(); - expect(singleBarFormatted.getLastOutput()).toEqual('ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘ | 0/100'); + expect(singleBarFormatted.getLastOutput()).toEqual('ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘ | 0/100'); }); }); }); describe('format', () => { test.each([ - [{}, 'ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘ | 1/100'], - [{ symbol: '@' }, '@ ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘ | 1/100'], - [{ symbol: '@', name: 'name' }, '@ name ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘ | 1/100'], - [{ name: 'name' }, 'name ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘ | 1/100'], - [{ name: 'name', waitingMessage: 'waiting' }, 'name ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | 
โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘ | 1/100 | waiting'], - [{ name: 'name', finishedMessage: 'done' }, 'name ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | done'], - [{ name: 'name', finishedMessage: 'done', waitingMessage: 'waiting' }, 'name ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | done'], + [{}, 'ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘ | 1/100'], + [{ symbol: '@' }, '@ ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘ | 1/100'], + [{ symbol: '@', name: 'name' }, '@ name ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘ | 1/100'], + [{ name: 'name' }, 'name ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘ | 1/100'], + [{ name: 'name', waitingMessage: 'waiting' }, 'name ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘ | 1/100 | waiting'], + [{ name: 'name', finishedMessage: 'done' }, 'name ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | done'], + [{ name: 'name', finishedMessage: 'done', waitingMessage: 'waiting' }, 'name ยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยทยท | done'], ] satisfies [ProgressBarPayload, string][])('should: %s', (payload, expected) => { testSingleBarFormatted(100, {}, (singleBarFormatted) => { 
singleBarFormatted.getSingleBar().increment(); diff --git a/test/driveSemaphore.test.ts b/test/driveSemaphore.test.ts index d436d7ed4..91a408f3c 100644 --- a/test/driveSemaphore.test.ts +++ b/test/driveSemaphore.test.ts @@ -3,7 +3,7 @@ import DriveSemaphore from '../src/driveSemaphore.js'; describe('map', () => { it('should handle thrown errors', async () => { await expect( - new DriveSemaphore().map( + new DriveSemaphore(1).map( ['file'], () => { throw new Error('error'); }, ), @@ -12,7 +12,7 @@ describe('map', () => { it('should handle thrown literals', async () => { await expect( - new DriveSemaphore().map( + new DriveSemaphore(1).map( ['file'], // eslint-disable-next-line @typescript-eslint/no-throw-literal () => { throw 'message'; }, diff --git a/test/endToEndTest.sh b/test/endToEndTest.sh index 5727ab66e..94bf4ee84 100755 --- a/test/endToEndTest.sh +++ b/test/endToEndTest.sh @@ -1,9 +1,8 @@ #!/usr/bin/env bash set -euo pipefail -here="${PWD}" # shellcheck disable=SC2064 -trap "cd \"${here}\"" EXIT +trap "cd \"${PWD}\"" EXIT # @param {string} $1 Directory to start from # @param {string} $2 Filename @@ -24,14 +23,17 @@ parent_find() { cd "$(parent_find . 
"package.json")" test_igir() { + echo "--------------------------------------------------" temp="$(mktemp -d)" ./dist/index.js "$@" \ --dat test/fixtures/dats/* \ --input test/fixtures/roms/* \ + --input-exclude test/fixtures/roms/discs/* \ --output "${temp}" - ls "${temp}"/* &> /dev/null + ls -al "${temp}"/* rm -rf "${temp}" } test_igir copy test +test_igir copy extract test test_igir copy zip test diff --git a/test/fixtures/dats/one.dat b/test/fixtures/dats/one.dat index beb7461a8..7f25415ef 100644 --- a/test/fixtures/dats/one.dat +++ b/test/fixtures/dats/one.dat @@ -42,8 +42,34 @@ + + Device + + + CD-ROM + + + + + + + GD-ROM + + + + + + + + UMD + + + + GameCube NKit ISO + + diff --git a/test/fixtures/roms/chd/2048.chd b/test/fixtures/roms/chd/2048.chd new file mode 100644 index 000000000..2fb9770e6 Binary files /dev/null and b/test/fixtures/roms/chd/2048.chd differ diff --git a/test/fixtures/roms/chd/4096.chd b/test/fixtures/roms/chd/4096.chd new file mode 100644 index 000000000..d7db6f941 Binary files /dev/null and b/test/fixtures/roms/chd/4096.chd differ diff --git a/test/fixtures/roms/chd/CD-ROM.chd b/test/fixtures/roms/chd/CD-ROM.chd new file mode 100644 index 000000000..8c240e5a4 Binary files /dev/null and b/test/fixtures/roms/chd/CD-ROM.chd differ diff --git a/test/fixtures/roms/chd/GD-ROM.chd b/test/fixtures/roms/chd/GD-ROM.chd new file mode 100644 index 000000000..1910fe41e Binary files /dev/null and b/test/fixtures/roms/chd/GD-ROM.chd differ diff --git a/test/fixtures/roms/cso/UMD.cso b/test/fixtures/roms/cso/UMD.cso new file mode 100644 index 000000000..0f734ace3 Binary files /dev/null and b/test/fixtures/roms/cso/UMD.cso differ diff --git a/test/fixtures/roms/cso/UMD.zso b/test/fixtures/roms/cso/UMD.zso new file mode 100644 index 000000000..b1c07832b Binary files /dev/null and b/test/fixtures/roms/cso/UMD.zso differ diff --git a/test/fixtures/roms/discs/CD-ROM (Track 1).bin b/test/fixtures/roms/discs/CD-ROM (Track 1).bin new file mode 100644 index 
000000000..0c78c1fbc Binary files /dev/null and b/test/fixtures/roms/discs/CD-ROM (Track 1).bin differ diff --git a/test/fixtures/roms/discs/CD-ROM (Track 2).bin b/test/fixtures/roms/discs/CD-ROM (Track 2).bin new file mode 100644 index 000000000..573bd2f9c Binary files /dev/null and b/test/fixtures/roms/discs/CD-ROM (Track 2).bin differ diff --git a/test/fixtures/roms/discs/CD-ROM (Track 3).bin b/test/fixtures/roms/discs/CD-ROM (Track 3).bin new file mode 100644 index 000000000..06d86590d Binary files /dev/null and b/test/fixtures/roms/discs/CD-ROM (Track 3).bin differ diff --git a/test/fixtures/roms/discs/CD-ROM.cue b/test/fixtures/roms/discs/CD-ROM.cue new file mode 100644 index 000000000..151aad5a0 --- /dev/null +++ b/test/fixtures/roms/discs/CD-ROM.cue @@ -0,0 +1,11 @@ +FILE "CD-ROM (Track 1).bin" BINARY + TRACK 01 MODE2/2352 + INDEX 01 00:00:00 +FILE "CD-ROM (Track 2).bin" BINARY + TRACK 02 AUDIO + INDEX 00 00:00:00 + INDEX 01 00:02:00 +FILE "CD-ROM (Track 3).bin" BINARY + TRACK 03 AUDIO + INDEX 00 00:00:00 + INDEX 01 00:02:00 diff --git a/test/fixtures/roms/discs/GD-ROM.gdi b/test/fixtures/roms/discs/GD-ROM.gdi new file mode 100644 index 000000000..bf32b2433 --- /dev/null +++ b/test/fixtures/roms/discs/GD-ROM.gdi @@ -0,0 +1,5 @@ +4 +1 0 4 2352 track01.bin 0 +2 756 0 2352 track02.raw 0 +3 45000 4 2352 track03.bin 0 +4 158259 4 2352 track04.bin 0 diff --git a/test/fixtures/roms/discs/UMD.iso b/test/fixtures/roms/discs/UMD.iso new file mode 100644 index 000000000..93333c28b Binary files /dev/null and b/test/fixtures/roms/discs/UMD.iso differ diff --git a/test/fixtures/roms/discs/track01.bin b/test/fixtures/roms/discs/track01.bin new file mode 100644 index 000000000..dbf01346c Binary files /dev/null and b/test/fixtures/roms/discs/track01.bin differ diff --git a/test/fixtures/roms/discs/track02.raw b/test/fixtures/roms/discs/track02.raw new file mode 100644 index 000000000..325f2bac6 Binary files /dev/null and b/test/fixtures/roms/discs/track02.raw differ diff 
--git a/test/fixtures/roms/discs/track03.bin b/test/fixtures/roms/discs/track03.bin new file mode 100644 index 000000000..a6acad8e1 Binary files /dev/null and b/test/fixtures/roms/discs/track03.bin differ diff --git a/test/fixtures/roms/discs/track04.bin b/test/fixtures/roms/discs/track04.bin new file mode 100644 index 000000000..f9e117234 Binary files /dev/null and b/test/fixtures/roms/discs/track04.bin differ diff --git a/test/fixtures/roms/nkit/5bc2ce5b.nkit.iso b/test/fixtures/roms/nkit/5bc2ce5b.nkit.iso new file mode 100644 index 000000000..469861d4b Binary files /dev/null and b/test/fixtures/roms/nkit/5bc2ce5b.nkit.iso differ diff --git a/test/igir.test.ts b/test/igir.test.ts index 9512905ae..7784411bc 100644 --- a/test/igir.test.ts +++ b/test/igir.test.ts @@ -8,9 +8,15 @@ import Igir from '../src/igir.js'; import DATScanner from '../src/modules/datScanner.js'; import ArrayPoly from '../src/polyfill/arrayPoly.js'; import fsPoly from '../src/polyfill/fsPoly.js'; +import FileCache from '../src/types/files/fileCache.js'; import { ChecksumBitmask } from '../src/types/files/fileChecksums.js'; import FileFactory from '../src/types/files/fileFactory.js'; -import Options, { FixExtension, GameSubdirMode, OptionsProps } from '../src/types/options.js'; +import Options, { + FixExtension, + GameSubdirMode, + InputChecksumArchivesMode, + OptionsProps, +} from '../src/types/options.js'; import ProgressBarFake from './console/progressBarFake.js'; interface TestOutput { @@ -61,7 +67,7 @@ async function walkWithCrc(inputDir: string, outputDir: string): Promise { try { - return await FileFactory.filesFrom(filePath); + return await new FileFactory(new FileCache()).filesFrom(filePath); } catch { return []; } @@ -143,16 +149,20 @@ describe('with explicit DATs', () => { }), new Logger(LogLevel.NEVER)).main()).rejects.toThrow(/no valid dat files/i); }); - it('should copy and test', async () => { + it('should copy and test, without caching', async () => { await 
copyFixturesToTemp(async (inputTemp, outputTemp) => { const result = await runIgir({ commands: ['copy', 'test'], dat: [path.join(inputTemp, 'dats', '*')], input: [path.join(inputTemp, 'roms')], + inputExclude: [path.join(inputTemp, 'roms', 'discs')], // test archive scanning + matching + inputChecksumArchives: InputChecksumArchivesMode[InputChecksumArchivesMode.NEVER] + .toLowerCase(), output: outputTemp, dirDatName: true, dirGameSubdir: GameSubdirMode[GameSubdirMode.MULTIPLE].toLowerCase(), fixExtension: FixExtension[FixExtension.AUTO].toLowerCase(), + disableCache: true, }); expect(result.outputFilesAndCrcs).toEqual([ @@ -165,15 +175,26 @@ describe('with explicit DATs', () => { [path.join('Headerless', 'allpads.nes'), '6339abe6'], [path.join('Headerless', 'color_test.nes'), 'c9c1b7aa'], [`${path.join('Headerless', 'speed_test_v51.sfc.gz')}|speed_test_v51.sfc`, '8beffd94'], + [`${path.join('One', 'CD-ROM.chd')}|CD-ROM (Track 1).bin`, '49ca35fb'], + [`${path.join('One', 'CD-ROM.chd')}|CD-ROM (Track 2).bin`, '0316f720'], + [`${path.join('One', 'CD-ROM.chd')}|CD-ROM (Track 3).bin`, 'a320af40'], + [`${path.join('One', 'CD-ROM.chd')}|CD-ROM.cue`, 'xxxxxxxx'], [path.join('One', 'Fizzbuzz.nes'), '370517b5'], [path.join('One', 'Foobar.lnx'), 'b22c9747'], - [`${path.join('One', 'Lorem Ipsum.zip')}|loremipsum.rom`, '70856527'], - [`${path.join('One', 'One Three.zip')}|${path.join('1', 'one.rom')}`, 'f817a89f'], - [`${path.join('One', 'One Three.zip')}|${path.join('2', 'two.rom')}`, '96170874'], - [`${path.join('One', 'One Three.zip')}|${path.join('3', 'three.rom')}`, 'ff46c5d8'], + [`${path.join('One', 'GameCube NKit ISO.nkit.iso')}|GameCube NKit ISO.iso`, '5bc2ce5b'], + [`${path.join('One', 'GD-ROM.chd')}|GD-ROM.gdi`, 'f16f621c'], + [`${path.join('One', 'GD-ROM.chd')}|track01.bin`, '9796ed9a'], + [`${path.join('One', 'GD-ROM.chd')}|track02.raw`, 'abc178d5'], + [`${path.join('One', 'GD-ROM.chd')}|track03.bin`, '61a363f1'], + [`${path.join('One', 'GD-ROM.chd')}|track04.bin`, 
'fc5ff5a0'], + [path.join('One', 'One Three', 'One.rom'), 'f817a89f'], + [path.join('One', 'One Three', 'Three.rom'), 'ff46c5d8'], + [`${path.join('One', 'Three Four Five', '2048')}|`, 'xxxxxxxx'], // hard disk + [`${path.join('One', 'Three Four Five', '4096')}|`, 'xxxxxxxx'], // hard disk [path.join('One', 'Three Four Five', 'Five.rom'), '3e5daf67'], [path.join('One', 'Three Four Five', 'Four.rom'), '1cf3ca74'], [path.join('One', 'Three Four Five', 'Three.rom'), 'ff46c5d8'], + [`${path.join('One', 'UMD.cso')}|UMD.iso`, 'e90f7cf5'], [path.join('Patchable', '0F09A40.rom'), '2f943e86'], [path.join('Patchable', '3708F2C.rom'), '20891c9f'], [path.join('Patchable', '612644F.rom'), 'f7591b29'], @@ -196,28 +217,43 @@ describe('with explicit DATs', () => { }); }); - it('should copy a 1G1R set', async () => { + it('should copy a 1G1R set, with a custom cache path', async () => { await copyFixturesToTemp(async (inputTemp, outputTemp) => { const result = await runIgir({ commands: ['copy'], dat: [path.join(inputTemp, 'dats', 'one.dat')], input: [path.join(inputTemp, 'roms')], + inputChecksumArchives: InputChecksumArchivesMode[InputChecksumArchivesMode.ALWAYS] + .toLowerCase(), output: outputTemp, dirGameSubdir: GameSubdirMode[GameSubdirMode.MULTIPLE].toLowerCase(), single: true, preferParent: true, + cachePath: inputTemp, }); expect(result.outputFilesAndCrcs).toEqual([ + ['CD-ROM.chd|CD-ROM (Track 1).bin', '49ca35fb'], + ['CD-ROM.chd|CD-ROM (Track 2).bin', '0316f720'], + ['CD-ROM.chd|CD-ROM (Track 3).bin', 'a320af40'], + ['CD-ROM.chd|CD-ROM.cue', 'xxxxxxxx'], // Fizzbuzz.nes is explicitly missing! 
['Foobar.lnx', 'b22c9747'], + ['GameCube NKit ISO.nkit.iso|GameCube NKit ISO.iso', '5bc2ce5b'], + ['GD-ROM.chd|GD-ROM.gdi', 'f16f621c'], + ['GD-ROM.chd|track01.bin', '9796ed9a'], + ['GD-ROM.chd|track02.raw', 'abc178d5'], + ['GD-ROM.chd|track03.bin', '61a363f1'], + ['GD-ROM.chd|track04.bin', 'fc5ff5a0'], ['Lorem Ipsum.zip|loremipsum.rom', '70856527'], - [`${path.join('One Three.zip')}|${path.join('1', 'one.rom')}`, 'f817a89f'], - [`${path.join('One Three.zip')}|${path.join('2', 'two.rom')}`, '96170874'], - [`${path.join('One Three.zip')}|${path.join('3', 'three.rom')}`, 'ff46c5d8'], + [path.join('One Three', 'One.rom'), 'f817a89f'], + [path.join('One Three', 'Three.rom'), 'ff46c5d8'], + [`${path.join('Three Four Five', '2048')}|`, 'xxxxxxxx'], // hard disk + [`${path.join('Three Four Five', '4096')}|`, 'xxxxxxxx'], // hard disk [path.join('Three Four Five', 'Five.rom'), '3e5daf67'], [path.join('Three Four Five', 'Four.rom'), '1cf3ca74'], [path.join('Three Four Five', 'Three.rom'), 'ff46c5d8'], + ['UMD.iso', 'e90f7cf5'], ]); expect(result.cwdFilesAndCrcs).toHaveLength(0); expect(result.movedFiles).toHaveLength(0); @@ -232,7 +268,7 @@ describe('with explicit DATs', () => { path.join(outputTemp, 'one.rom'), path.join(outputTemp, 'rom', 'two.rom'), path.join(outputTemp, 'zip', 'three.zip'), - path.join(outputTemp, 'iso', 'four.iso'), + path.join(outputTemp, 'cso', 'four.rvz'), ]; await Promise.all(junkFiles.map(async (junkFile) => { await fsPoly.touch(junkFile); @@ -254,10 +290,23 @@ describe('with explicit DATs', () => { }); expect(result.outputFilesAndCrcs).toEqual([ + [`${path.join('-', 'One', 'Three Four Five', '2048')}|`, 'xxxxxxxx'], // hard disk + [`${path.join('-', 'One', 'Three Four Five', '4096')}|`, 'xxxxxxxx'], // hard disk [`${path.join('7z', 'Headered', 'diagnostic_test_cartridge.a78.7z')}|diagnostic_test_cartridge.a78`, 'f6cc9b1c'], + [`${path.join('chd', 'One', 'CD-ROM.chd')}|CD-ROM (Track 1).bin`, '49ca35fb'], + [`${path.join('chd', 'One', 
'CD-ROM.chd')}|CD-ROM (Track 2).bin`, '0316f720'], + [`${path.join('chd', 'One', 'CD-ROM.chd')}|CD-ROM (Track 3).bin`, 'a320af40'], + [`${path.join('chd', 'One', 'CD-ROM.chd')}|CD-ROM.cue`, 'xxxxxxxx'], + [`${path.join('chd', 'One', 'GD-ROM.chd')}|GD-ROM.gdi`, 'f16f621c'], + [`${path.join('chd', 'One', 'GD-ROM.chd')}|track01.bin`, '9796ed9a'], + [`${path.join('chd', 'One', 'GD-ROM.chd')}|track02.raw`, 'abc178d5'], + [`${path.join('chd', 'One', 'GD-ROM.chd')}|track03.bin`, '61a363f1'], + [`${path.join('chd', 'One', 'GD-ROM.chd')}|track04.bin`, 'fc5ff5a0'], + [path.join('cso', 'four.rvz'), '00000000'], // explicitly not deleted, there were no input files with the extension "cso" [`${path.join('gz', 'Headerless', 'speed_test_v51.sfc.gz')}|speed_test_v51.sfc`, '8beffd94'], [`${path.join('gz', 'Patchable', 'Best.gz')}|best.rom`, '1e3d78cf'], - [path.join('iso', 'four.iso'), '00000000'], // explicitly not deleted, there were no input files with the extension "iso" + [`${path.join('iso', 'One', 'GameCube NKit ISO.nkit.iso')}|GameCube NKit ISO.iso`, '5bc2ce5b'], + [path.join('iso', 'One', 'UMD.iso'), 'e90f7cf5'], [path.join('lnx', 'One', 'Foobar.lnx'), 'b22c9747'], [path.join('lnx', 'smdb', 'Hardware Target Game Database', 'Dummy', 'Foobar.lnx'), 'b22c9747'], [path.join('nes', 'Headered', 'allpads.nes'), '9180a163'], @@ -268,6 +317,8 @@ describe('with explicit DATs', () => { [path.join('nes', 'smdb', 'Hardware Target Game Database', 'Dummy', 'Fizzbuzz.nes'), '370517b5'], ['one.rom', '00000000'], // explicitly not deleted, it is not in an extension subdirectory [`${path.join('rar', 'Headered', 'LCDTestROM.lnx.rar')}|LCDTestROM.lnx`, '2d251538'], + [path.join('rom', 'One', 'One Three', 'One.rom'), 'f817a89f'], + [path.join('rom', 'One', 'One Three', 'Three.rom'), 'ff46c5d8'], [path.join('rom', 'One', 'Three Four Five', 'Five.rom'), '3e5daf67'], [path.join('rom', 'One', 'Three Four Five', 'Four.rom'), '1cf3ca74'], [path.join('rom', 'One', 'Three Four Five', 'Three.rom'), 
'ff46c5d8'], @@ -286,9 +337,6 @@ describe('with explicit DATs', () => { [path.join('smc', 'Headered', 'speed_test_v51.smc'), '9adca6cc'], [`${path.join('zip', 'Headered', 'fds_joypad_test.fds.zip')}|fds_joypad_test.fds`, '1e58456d'], [`${path.join('zip', 'One', 'Lorem Ipsum.zip')}|loremipsum.rom`, '70856527'], - [`${path.join('zip', 'One', 'One Three.zip')}|${path.join('1', 'one.rom')}`, 'f817a89f'], - [`${path.join('zip', 'One', 'One Three.zip')}|${path.join('2', 'two.rom')}`, '96170874'], - [`${path.join('zip', 'One', 'One Three.zip')}|${path.join('3', 'three.rom')}`, 'ff46c5d8'], ]); expect(result.cwdFilesAndCrcs).toHaveLength(0); expect(result.movedFiles).toHaveLength(0); @@ -317,6 +365,7 @@ describe('with explicit DATs', () => { output: outputTemp, dirDatName: true, dirGameSubdir: GameSubdirMode[GameSubdirMode.MULTIPLE].toLowerCase(), + excludeDisks: true, }); expect(result.outputFilesAndCrcs).toEqual([ @@ -355,6 +404,7 @@ describe('with explicit DATs', () => { output: outputTemp, dirDatName: true, dirGameSubdir: GameSubdirMode[GameSubdirMode.MULTIPLE].toLowerCase(), + excludeDisks: true, }); expect(result.outputFilesAndCrcs).toEqual([ @@ -382,6 +432,7 @@ describe('with explicit DATs', () => { dat: [path.join(inputTemp, 'dats', '*')], datExclude: [path.join(inputTemp, 'dats', 'headerless.*')], input: [path.join(inputTemp, 'roms')], + inputExclude: [path.join(inputTemp, 'roms', 'discs')], // test archive scanning + matching output: outputTemp, datCombine: true, dirDatName: true, @@ -404,6 +455,11 @@ describe('with explicit DATs', () => { [path.join('igir combined', 'fds_joypad_test.fds'), '1e58456d'], [path.join('igir combined', 'Fizzbuzz.nes'), '370517b5'], [path.join('igir combined', 'Foobar.lnx'), 'b22c9747'], + [path.join('igir combined', 'GD-ROM', 'GD-ROM.gdi'), 'f16f621c'], + [path.join('igir combined', 'GD-ROM', 'track01.bin'), '9796ed9a'], + [path.join('igir combined', 'GD-ROM', 'track02.raw'), 'abc178d5'], + [path.join('igir combined', 'GD-ROM', 
'track03.bin'), '61a363f1'], + [path.join('igir combined', 'GD-ROM', 'track04.bin'), 'fc5ff5a0'], [path.join('igir combined', 'Hardware Target Game Database', 'Dummy', 'Fizzbuzz.nes'), '370517b5'], [path.join('igir combined', 'Hardware Target Game Database', 'Dummy', 'Foobar.lnx'), 'b22c9747'], [path.join('igir combined', 'Hardware Target Game Database', 'Dummy', 'Lorem Ipsum.rom'), '70856527'], @@ -416,13 +472,20 @@ describe('with explicit DATs', () => { [path.join('igir combined', 'One Three', 'One.rom'), 'f817a89f'], [path.join('igir combined', 'One Three', 'Three.rom'), 'ff46c5d8'], [path.join('igir combined', 'speed_test_v51.smc'), '9adca6cc'], + [`${path.join('igir combined', 'Three Four Five', '2048')}|`, 'xxxxxxxx'], // hard disk + [`${path.join('igir combined', 'Three Four Five', '4096')}|`, 'xxxxxxxx'], // hard disk [path.join('igir combined', 'Three Four Five', 'Five.rom'), '3e5daf67'], [path.join('igir combined', 'Three Four Five', 'Four.rom'), '1cf3ca74'], [path.join('igir combined', 'Three Four Five', 'Three.rom'), 'ff46c5d8'], + [path.join('igir combined', 'UMD.iso'), 'e90f7cf5'], ]); expect(result.cwdFilesAndCrcs).toHaveLength(0); expect(result.movedFiles).toEqual([ - path.join('foobar.lnx'), + path.join('chd', '2048.chd'), + path.join('chd', '4096.chd'), + path.join('chd', 'GD-ROM.chd'), + path.join('cso', 'UMD.cso'), + 'foobar.lnx', path.join('headered', 'LCDTestROM.lnx.rar'), path.join('headered', 'allpads.nes'), path.join('headered', 'color_test.nintendoentertainmentsystem'), @@ -484,7 +547,7 @@ describe('with explicit DATs', () => { }); }); - it('should move zipped files', async () => { + it('should move zipped files, allowing excess sets', async () => { await copyFixturesToTemp(async (inputTemp, outputTemp) => { const result = await runIgir({ commands: ['move'], @@ -492,6 +555,7 @@ describe('with explicit DATs', () => { input: [path.join(inputTemp, 'roms', 'zip')], output: outputTemp, dirDatName: true, + allowExcessSets: true, }); 
expect(result.outputFilesAndCrcs).toEqual([ @@ -545,14 +609,26 @@ describe('with explicit DATs', () => { [`${path.join('Headerless', 'fds_joypad_test.zip')}|fds_joypad_test.fds`, '3ecbac61'], [`${path.join('Headerless', 'LCDTestROM.zip')}|LCDTestROM.lyx`, '42583855'], [`${path.join('Headerless', 'speed_test_v51.zip')}|speed_test_v51.sfc`, '8beffd94'], + [`${path.join('One', 'CD-ROM.zip')}|CD-ROM (Track 1).bin`, '49ca35fb'], + [`${path.join('One', 'CD-ROM.zip')}|CD-ROM (Track 2).bin`, '0316f720'], + [`${path.join('One', 'CD-ROM.zip')}|CD-ROM (Track 3).bin`, 'a320af40'], + [`${path.join('One', 'CD-ROM.zip')}|CD-ROM.cue`, '4ce39e73'], [`${path.join('One', 'Fizzbuzz.zip')}|Fizzbuzz.nes`, '370517b5'], [`${path.join('One', 'Foobar.zip')}|Foobar.lnx`, 'b22c9747'], + [`${path.join('One', 'GD-ROM.zip')}|GD-ROM.gdi`, 'f16f621c'], + [`${path.join('One', 'GD-ROM.zip')}|track01.bin`, '9796ed9a'], + [`${path.join('One', 'GD-ROM.zip')}|track02.raw`, 'abc178d5'], + [`${path.join('One', 'GD-ROM.zip')}|track03.bin`, '61a363f1'], + [`${path.join('One', 'GD-ROM.zip')}|track04.bin`, 'fc5ff5a0'], [`${path.join('One', 'Lorem Ipsum.zip')}|Lorem Ipsum.zip`, '7ee77289'], [`${path.join('One', 'One Three.zip')}|One.rom`, 'f817a89f'], [`${path.join('One', 'One Three.zip')}|Three.rom`, 'ff46c5d8'], [`${path.join('One', 'Three Four Five.zip')}|Five.rom`, '3e5daf67'], [`${path.join('One', 'Three Four Five.zip')}|Four.rom`, '1cf3ca74'], [`${path.join('One', 'Three Four Five.zip')}|Three.rom`, 'ff46c5d8'], + [`${path.join('One', 'Three Four Five', '2048')}|`, 'xxxxxxxx'], // hard disk + [`${path.join('One', 'Three Four Five', '4096')}|`, 'xxxxxxxx'], // hard disk + [`${path.join('One', 'UMD.zip')}|UMD.iso`, 'e90f7cf5'], [`${path.join('Patchable', '0F09A40.zip')}|0F09A40.rom`, '2f943e86'], [`${path.join('Patchable', '3708F2C.zip')}|3708F2C.rom`, '20891c9f'], [`${path.join('Patchable', '612644F.zip')}|612644F.rom`, 'f7591b29'], @@ -581,6 +657,7 @@ describe('with explicit DATs', () => { commands: 
['copy', 'zip', 'test'], dat: [path.join(inputTemp, 'dats', '*')], input: [path.join(inputTemp, 'roms')], + inputExclude: [path.join(inputTemp, 'roms', 'nkit')], // will throw an error, preventing everything output: outputTemp, zipDatName: true, fixExtension: FixExtension[FixExtension.NEVER].toLowerCase(), @@ -593,26 +670,36 @@ describe('with explicit DATs', () => { [`${path.join('Hardware Target Game Database', 'Dummy', 'smdb.zip')}|Fizzbuzz.nes`, '370517b5'], [`${path.join('Hardware Target Game Database', 'Dummy', 'smdb.zip')}|Foobar.lnx`, 'b22c9747'], [`${path.join('Hardware Target Game Database', 'Dummy', 'smdb.zip')}|Lorem Ipsum.rom`, '70856527'], - ['Headered.zip|allpads.rom', '9180a163'], - ['Headered.zip|color_test.rom', 'c9c1b7aa'], - ['Headered.zip|diagnostic_test_cartridge.rom', 'f6cc9b1c'], - ['Headered.zip|fds_joypad_test.rom', '1e58456d'], - ['Headered.zip|LCDTestROM.rom', '2d251538'], - ['Headered.zip|speed_test_v51.rom', '9adca6cc'], + ['Headered.zip|allpads.nes', '9180a163'], + ['Headered.zip|color_test.nes', 'c9c1b7aa'], + ['Headered.zip|diagnostic_test_cartridge.a78', 'f6cc9b1c'], + ['Headered.zip|fds_joypad_test.fds', '1e58456d'], + ['Headered.zip|LCDTestROM.lnx', '2d251538'], + ['Headered.zip|speed_test_v51.smc', '9adca6cc'], ['Headerless.zip|allpads.nes', '6339abe6'], ['Headerless.zip|color_test.nes', 'c9c1b7aa'], ['Headerless.zip|diagnostic_test_cartridge.a78', 'a1eaa7c1'], ['Headerless.zip|fds_joypad_test.fds', '3ecbac61'], ['Headerless.zip|LCDTestROM.lyx', '42583855'], ['Headerless.zip|speed_test_v51.sfc', '8beffd94'], + [`One.zip|${path.join('CD-ROM', 'CD-ROM (Track 1).bin')}`, '49ca35fb'], + [`One.zip|${path.join('CD-ROM', 'CD-ROM (Track 2).bin')}`, '0316f720'], + [`One.zip|${path.join('CD-ROM', 'CD-ROM (Track 3).bin')}`, 'a320af40'], + [`One.zip|${path.join('CD-ROM', 'CD-ROM.cue')}`, '4ce39e73'], ['One.zip|Fizzbuzz.nes', '370517b5'], ['One.zip|Foobar.lnx', 'b22c9747'], + [`One.zip|${path.join('GD-ROM', 'GD-ROM.gdi')}`, 'f16f621c'], + 
[`One.zip|${path.join('GD-ROM', 'track01.bin')}`, '9796ed9a'], + [`One.zip|${path.join('GD-ROM', 'track02.raw')}`, 'abc178d5'], + [`One.zip|${path.join('GD-ROM', 'track03.bin')}`, '61a363f1'], + [`One.zip|${path.join('GD-ROM', 'track04.bin')}`, 'fc5ff5a0'], ['One.zip|Lorem Ipsum.zip', '7ee77289'], [`One.zip|${path.join('One Three', 'One.rom')}`, 'f817a89f'], [`One.zip|${path.join('One Three', 'Three.rom')}`, 'ff46c5d8'], [`One.zip|${path.join('Three Four Five', 'Five.rom')}`, '3e5daf67'], [`One.zip|${path.join('Three Four Five', 'Four.rom')}`, '1cf3ca74'], [`One.zip|${path.join('Three Four Five', 'Three.rom')}`, 'ff46c5d8'], + ['One.zip|UMD.iso', 'e90f7cf5'], ['Patchable.zip|0F09A40.rom', '2f943e86'], ['Patchable.zip|3708F2C.rom', '20891c9f'], ['Patchable.zip|612644F.rom', 'f7591b29'], @@ -622,6 +709,8 @@ describe('with explicit DATs', () => { ['Patchable.zip|Best.rom', '1e3d78cf'], ['Patchable.zip|C01173E.rom', 'dfaebe28'], ['Patchable.zip|KDULVQN.rom', 'b1c303e4'], + [`${path.join('Three Four Five', '2048')}|`, 'xxxxxxxx'], // hard disk + [`${path.join('Three Four Five', '4096')}|`, 'xxxxxxxx'], // hard disk ]); expect(result.cwdFilesAndCrcs).toHaveLength(0); expect(result.movedFiles).toHaveLength(0); @@ -651,15 +740,27 @@ describe('with explicit DATs', () => { [`${path.join('Headered', 'speed_test_v51.smc')} -> ${path.join('', 'headered', 'speed_test_v51.smc')}`, '9adca6cc'], [`${path.join('Headerless', 'color_test.nes')} -> ${path.join('', 'headered', 'color_test.nintendoentertainmentsystem')}`, 'c9c1b7aa'], [`${path.join('Headerless', 'speed_test_v51.sfc.gz')}|speed_test_v51.sfc -> ${path.join('', 'headerless', 'speed_test_v51.sfc.gz')}|speed_test_v51.sfc`, '8beffd94'], + [`${path.join('One', 'CD-ROM.chd')}|CD-ROM (Track 1).bin -> ${path.join('', 'chd', 'CD-ROM.chd')}|CD-ROM (Track 1).bin`, '49ca35fb'], + [`${path.join('One', 'CD-ROM.chd')}|CD-ROM (Track 2).bin -> ${path.join('', 'chd', 'CD-ROM.chd')}|CD-ROM (Track 2).bin`, '0316f720'], + [`${path.join('One', 
'CD-ROM.chd')}|CD-ROM (Track 3).bin -> ${path.join('', 'chd', 'CD-ROM.chd')}|CD-ROM (Track 3).bin`, 'a320af40'], + [`${path.join('One', 'CD-ROM.chd')}|CD-ROM.cue -> ${path.join('', 'chd', 'CD-ROM.chd')}|CD-ROM.cue`, 'xxxxxxxx'], [`${path.join('One', 'Fizzbuzz.nes')} -> ${path.join('', 'raw', 'fizzbuzz.nes')}`, '370517b5'], [`${path.join('One', 'Foobar.lnx')} -> ${path.join('', 'foobar.lnx')}`, 'b22c9747'], + [`${path.join('One', 'GameCube NKit ISO.nkit.iso')}|GameCube NKit ISO.iso -> ${path.join('', 'nkit', '5bc2ce5b.nkit.iso')}|GameCube NKit ISO.iso`, '5bc2ce5b'], + [`${path.join('One', 'GD-ROM.chd')}|GD-ROM.gdi -> ${path.join('', 'chd', 'GD-ROM.chd')}|GD-ROM.gdi`, 'f16f621c'], + [`${path.join('One', 'GD-ROM.chd')}|track01.bin -> ${path.join('', 'chd', 'GD-ROM.chd')}|track01.bin`, '9796ed9a'], + [`${path.join('One', 'GD-ROM.chd')}|track02.raw -> ${path.join('', 'chd', 'GD-ROM.chd')}|track02.raw`, 'abc178d5'], + [`${path.join('One', 'GD-ROM.chd')}|track03.bin -> ${path.join('', 'chd', 'GD-ROM.chd')}|track03.bin`, '61a363f1'], + [`${path.join('One', 'GD-ROM.chd')}|track04.bin -> ${path.join('', 'chd', 'GD-ROM.chd')}|track04.bin`, 'fc5ff5a0'], [`${path.join('One', 'Lorem Ipsum.zip')}|loremipsum.rom -> ${path.join('', 'zip', 'loremipsum.zip')}|loremipsum.rom`, '70856527'], - [`${path.join('One', 'One Three.zip')}|${path.join('1', 'one.rom')} -> ${path.join('', 'zip', 'onetwothree.zip')}|${path.join('1', 'one.rom')}`, 'f817a89f'], - [`${path.join('One', 'One Three.zip')}|${path.join('2', 'two.rom')} -> ${path.join('', 'zip', 'onetwothree.zip')}|${path.join('2', 'two.rom')}`, '96170874'], - [`${path.join('One', 'One Three.zip')}|${path.join('3', 'three.rom')} -> ${path.join('', 'zip', 'onetwothree.zip')}|${path.join('3', 'three.rom')}`, 'ff46c5d8'], + [`${path.join('One', 'One Three', 'One.rom')} -> ${path.join('', 'raw', 'one.rom')}`, 'f817a89f'], + [`${path.join('One', 'One Three', 'Three.rom')} -> ${path.join('', 'raw', 'three.rom')}`, 'ff46c5d8'], + 
[`${path.join('One', 'Three Four Five', '2048')}| -> ${path.join('', 'chd', '2048.chd')}|`, 'xxxxxxxx'], // hard disk + [`${path.join('One', 'Three Four Five', '4096')}| -> ${path.join('', 'chd', '4096.chd')}|`, 'xxxxxxxx'], // hard disk [`${path.join('One', 'Three Four Five', 'Five.rom')} -> ${path.join('', 'raw', 'five.rom')}`, '3e5daf67'], [`${path.join('One', 'Three Four Five', 'Four.rom')} -> ${path.join('', 'raw', 'four.rom')}`, '1cf3ca74'], [`${path.join('One', 'Three Four Five', 'Three.rom')} -> ${path.join('', 'raw', 'three.rom')}`, 'ff46c5d8'], + [`${path.join('One', 'UMD.iso')} -> ${path.join('', 'discs', 'UMD.iso')}`, 'e90f7cf5'], [`${path.join('Patchable', '0F09A40.rom')} -> ${path.join('', 'patchable', '0F09A40.rom')}`, '2f943e86'], [`${path.join('Patchable', '3708F2C.rom')} -> ${path.join('', 'patchable', '3708F2C.rom')}`, '20891c9f'], [`${path.join('Patchable', '612644F.rom')} -> ${path.join('', 'patchable', '612644F.rom')}`, 'f7591b29'], @@ -688,7 +789,7 @@ describe('with explicit DATs', () => { commands: ['copy', 'extract', 'test'], dat: [path.join(inputTemp, 'dats', '*')], input: [path.join(inputTemp, 'roms')], - inputMinChecksum: ChecksumBitmask[ChecksumBitmask.MD5].toLowerCase(), + inputChecksumMin: ChecksumBitmask[ChecksumBitmask.MD5].toLowerCase(), patch: [path.join(inputTemp, 'patches')], output: outputTemp, dirDatName: true, @@ -710,14 +811,26 @@ describe('with explicit DATs', () => { [path.join('Headerless', 'fds_joypad_test.fds'), '3ecbac61'], [path.join('Headerless', 'LCDTestROM.lyx'), '42583855'], [path.join('Headerless', 'speed_test_v51.sfc'), '8beffd94'], + [path.join('One', 'CD-ROM', 'CD-ROM (Track 1).bin'), '49ca35fb'], + [path.join('One', 'CD-ROM', 'CD-ROM (Track 2).bin'), '0316f720'], + [path.join('One', 'CD-ROM', 'CD-ROM (Track 3).bin'), 'a320af40'], + [path.join('One', 'CD-ROM', 'CD-ROM.cue'), '4ce39e73'], [path.join('One', 'Fizzbuzz.nes'), '370517b5'], [path.join('One', 'Foobar.lnx'), 'b22c9747'], + [path.join('One', 'GD-ROM', 
'GD-ROM.gdi'), 'f16f621c'], + [path.join('One', 'GD-ROM', 'track01.bin'), '9796ed9a'], + [path.join('One', 'GD-ROM', 'track02.raw'), 'abc178d5'], + [path.join('One', 'GD-ROM', 'track03.bin'), '61a363f1'], + [path.join('One', 'GD-ROM', 'track04.bin'), 'fc5ff5a0'], [`${path.join('One', 'Lorem Ipsum.zip')}|loremipsum.rom`, '70856527'], [path.join('One', 'One Three', 'One.rom'), 'f817a89f'], [path.join('One', 'One Three', 'Three.rom'), 'ff46c5d8'], + [`${path.join('One', 'Three Four Five', '2048')}|`, 'xxxxxxxx'], // hard disk + [`${path.join('One', 'Three Four Five', '4096')}|`, 'xxxxxxxx'], // hard disk [path.join('One', 'Three Four Five', 'Five.rom'), '3e5daf67'], [path.join('One', 'Three Four Five', 'Four.rom'), '1cf3ca74'], [path.join('One', 'Three Four Five', 'Three.rom'), 'ff46c5d8'], + [path.join('One', 'UMD.iso'), 'e90f7cf5'], [path.join('Patchable', '04C896D-GBA.rom'), 'b13eb478'], [path.join('Patchable', '0F09A40.rom'), '2f943e86'], [path.join('Patchable', '3708F2C.rom'), '20891c9f'], @@ -843,13 +956,17 @@ describe('with inferred DATs', () => { const result = await runIgir({ commands: ['copy', 'test'], input: [path.join(inputTemp, 'roms')], + inputExclude: [path.join(inputTemp, 'roms', 'discs')], // test archive scanning + matching output: outputTemp, fixExtension: FixExtension[FixExtension.AUTO].toLowerCase(), }); expect(result.outputFilesAndCrcs).toEqual([ ['0F09A40.rom', '2f943e86'], + ['2048.chd|', 'xxxxxxxx'], // hard disk ['3708F2C.rom', '20891c9f'], + ['4096.chd|', 'xxxxxxxx'], // hard disk + ['5bc2ce5b.nkit.iso|5bc2ce5b.iso', '5bc2ce5b'], ['612644F.rom', 'f7591b29'], ['65D1206.rom', '20323455'], ['92C85C9.rom', '06692159'], @@ -857,6 +974,10 @@ describe('with inferred DATs', () => { ['before.rom', '0361b321'], ['best.gz|best.rom', '1e3d78cf'], ['C01173E.rom', 'dfaebe28'], + ['CD-ROM.chd|CD-ROM (Track 1).bin', '49ca35fb'], + ['CD-ROM.chd|CD-ROM (Track 2).bin', '0316f720'], + ['CD-ROM.chd|CD-ROM (Track 3).bin', 'a320af40'], + ['CD-ROM.chd|CD-ROM.cue', 
'xxxxxxxx'], ['color_test.nes', 'c9c1b7aa'], ['diagnostic_test_cartridge.a78.7z|diagnostic_test_cartridge.a78', 'f6cc9b1c'], ['empty.rom', '00000000'], @@ -867,6 +988,11 @@ describe('with inferred DATs', () => { ['four.rom', '1cf3ca74'], ['fourfive.zip|five.rom', '3e5daf67'], ['fourfive.zip|four.rom', '1cf3ca74'], + ['GD-ROM.chd|GD-ROM.gdi', 'f16f621c'], + ['GD-ROM.chd|track01.bin', '9796ed9a'], + ['GD-ROM.chd|track02.raw', 'abc178d5'], + ['GD-ROM.chd|track03.bin', '61a363f1'], + ['GD-ROM.chd|track04.bin', 'fc5ff5a0'], ['KDULVQN.rom', 'b1c303e4'], ['LCDTestROM.lnx.rar|LCDTestROM.lnx', '2d251538'], ['loremipsum.rom', '70856527'], @@ -878,6 +1004,7 @@ describe('with inferred DATs', () => { ['speed_test_v51.smc', '9adca6cc'], ['three.rom', 'ff46c5d8'], ['two.rom', '96170874'], + ['UMD.cso|UMD.iso', 'e90f7cf5'], ['unknown.rom', '377a7727'], ]); expect(result.cwdFilesAndCrcs).toHaveLength(0); @@ -888,13 +1015,13 @@ describe('with inferred DATs', () => { it('should move to the same directory', async () => { await copyFixturesToTemp(async (inputTemp, outputTemp) => { - const inputDir = path.join(inputTemp, 'roms', 'raw'); + const inputDir = path.join(inputTemp, 'roms'); const inputBefore = await walkWithCrc(inputDir, inputDir); await runIgir({ commands: ['move', 'test'], input: [inputDir], - output: inputDir, + output: '{inputDirname}', }); await expect(walkWithCrc(inputDir, inputDir)).resolves.toEqual(inputBefore); @@ -907,6 +1034,7 @@ describe('with inferred DATs', () => { const result = await runIgir({ commands: ['move', 'extract', 'test'], input: [path.join(inputTemp, 'roms')], + inputExclude: [path.join(inputTemp, 'roms', 'discs')], // test archive scanning + matching output: outputTemp, dirGameSubdir: GameSubdirMode[GameSubdirMode.MULTIPLE].toLowerCase(), fixExtension: FixExtension[FixExtension.AUTO].toLowerCase(), @@ -914,7 +1042,9 @@ describe('with inferred DATs', () => { expect(result.outputFilesAndCrcs).toEqual([ ['0F09A40.rom', '2f943e86'], + ['2048.rom', 
'd774f042'], ['3708F2C.rom', '20891c9f'], + ['4096.rom', '2e19ca09'], ['612644F.rom', 'f7591b29'], ['65D1206.rom', '20323455'], ['92C85C9.rom', '06692159'], @@ -932,6 +1062,11 @@ describe('with inferred DATs', () => { ['four.rom', '1cf3ca74'], [path.join('fourfive', 'five.rom'), '3e5daf67'], [path.join('fourfive', 'four.rom'), '1cf3ca74'], + [path.join('GD-ROM', 'GD-ROM.gdi'), 'f16f621c'], + [path.join('GD-ROM', 'track01.bin'), '9796ed9a'], + [path.join('GD-ROM', 'track02.raw'), 'abc178d5'], + [path.join('GD-ROM', 'track03.bin'), '61a363f1'], + [path.join('GD-ROM', 'track04.bin'), 'fc5ff5a0'], ['KDULVQN.rom', 'b1c303e4'], ['LCDTestROM.lnx', '2d251538'], ['loremipsum.rom', '70856527'], @@ -943,12 +1078,15 @@ describe('with inferred DATs', () => { ['speed_test_v51.smc', '9adca6cc'], ['three.rom', 'ff46c5d8'], ['two.rom', '96170874'], + ['UMD.iso', 'e90f7cf5'], ['unknown.rom', '377a7727'], ]); expect(result.cwdFilesAndCrcs).toHaveLength(0); expect(result.movedFiles).toEqual([ - path.join('empty.rom'), - path.join('foobar.lnx'), + path.join('chd', 'GD-ROM.chd'), + path.join('cso', 'UMD.cso'), + 'empty.rom', + 'foobar.lnx', path.join('headered', 'LCDTestROM.lnx.rar'), path.join('headered', 'allpads.nes'), path.join('headered', 'color_test.nintendoentertainmentsystem'), @@ -983,14 +1121,18 @@ describe('with inferred DATs', () => { const result = await runIgir({ commands: ['copy', 'zip', 'test'], input: [path.join(inputTemp, 'roms')], - // Note: need to de-conflict headered & headerless ROMs due to duplicate output paths - inputExclude: [path.join(inputTemp, 'roms', 'headerless')], + inputExclude: [ + // Note: need to exclude some ROMs to prevent duplicate output paths + path.join(inputTemp, 'roms', 'headerless'), // de-conflict headered & headerless + ], output: outputTemp, }); expect(result.outputFilesAndCrcs).toEqual([ ['0F09A40.zip|0F09A40.rom', '2f943e86'], + ['2048.zip|2048.rom', 'd774f042'], ['3708F2C.zip|3708F2C.rom', '20891c9f'], + ['4096.zip|4096.rom', 
'2e19ca09'], ['612644F.zip|612644F.rom', 'f7591b29'], ['65D1206.zip|65D1206.rom', '20323455'], ['92C85C9.zip|92C85C9.rom', '06692159'], @@ -998,6 +1140,10 @@ describe('with inferred DATs', () => { ['before.zip|before.rom', '0361b321'], ['best.zip|best.rom', '1e3d78cf'], ['C01173E.zip|C01173E.rom', 'dfaebe28'], + ['CD-ROM.zip|CD-ROM (Track 1).bin', '49ca35fb'], + ['CD-ROM.zip|CD-ROM (Track 2).bin', '0316f720'], + ['CD-ROM.zip|CD-ROM (Track 3).bin', 'a320af40'], + ['CD-ROM.zip|CD-ROM.cue', '4ce39e73'], ['color_test.zip|color_test.nintendoentertainmentsystem', 'c9c1b7aa'], ['diagnostic_test_cartridge.zip|diagnostic_test_cartridge.a78', 'f6cc9b1c'], ['empty.zip|empty.rom', '00000000'], @@ -1008,6 +1154,11 @@ describe('with inferred DATs', () => { ['four.zip|four.rom', '1cf3ca74'], ['fourfive.zip|five.rom', '3e5daf67'], ['fourfive.zip|four.rom', '1cf3ca74'], + ['GD-ROM.zip|GD-ROM.gdi', 'f16f621c'], + ['GD-ROM.zip|track01.bin', '9796ed9a'], + ['GD-ROM.zip|track02.raw', 'abc178d5'], + ['GD-ROM.zip|track03.bin', '61a363f1'], + ['GD-ROM.zip|track04.bin', 'fc5ff5a0'], ['KDULVQN.zip|KDULVQN.rom', 'b1c303e4'], ['LCDTestROM.zip|LCDTestROM.lnx', '2d251538'], ['loremipsum.zip|loremipsum.rom', '70856527'], @@ -1018,6 +1169,7 @@ describe('with inferred DATs', () => { ['speed_test_v51.zip|speed_test_v51.smc', '9adca6cc'], ['three.zip|three.rom', 'ff46c5d8'], ['two.zip|two.rom', '96170874'], + ['UMD.zip|UMD.iso', 'e90f7cf5'], ['unknown.zip|unknown.rom', '377a7727'], ]); expect(result.cwdFilesAndCrcs).toHaveLength(0); @@ -1031,6 +1183,10 @@ describe('with inferred DATs', () => { const result = await runIgir({ commands: ['link', 'test'], input: [path.join(inputTemp, 'roms')], + inputExclude: [ + // Note: need to exclude some ROMs to prevent duplicate output paths + path.join(inputTemp, 'roms', 'discs'), // de-conflict chd & discs + ], output: outputTemp, fixExtension: FixExtension[FixExtension.AUTO].toLowerCase(), symlink: true, @@ -1039,7 +1195,10 @@ describe('with inferred DATs', () 
=> { expect(result.outputFilesAndCrcs).toEqual([ [`0F09A40.rom -> ${path.join('..', 'input', 'roms', 'patchable', '0F09A40.rom')}`, '2f943e86'], + [`2048.chd| -> ${path.join('..', 'input', 'roms', 'chd', '2048.chd|')}`, 'xxxxxxxx'], // hard disk [`3708F2C.rom -> ${path.join('..', 'input', 'roms', 'patchable', '3708F2C.rom')}`, '20891c9f'], + [`4096.chd| -> ${path.join('..', 'input', 'roms', 'chd', '4096.chd|')}`, 'xxxxxxxx'], // hard disk + [`5bc2ce5b.nkit.iso|5bc2ce5b.iso -> ${path.join('..', 'input', 'roms', 'nkit', '5bc2ce5b.nkit.iso')}|5bc2ce5b.iso`, '5bc2ce5b'], [`612644F.rom -> ${path.join('..', 'input', 'roms', 'patchable', '612644F.rom')}`, 'f7591b29'], [`65D1206.rom -> ${path.join('..', 'input', 'roms', 'patchable', '65D1206.rom')}`, '20323455'], [`92C85C9.rom -> ${path.join('..', 'input', 'roms', 'patchable', '92C85C9.rom')}`, '06692159'], @@ -1047,6 +1206,10 @@ describe('with inferred DATs', () => { [`before.rom -> ${path.join('..', 'input', 'roms', 'patchable', 'before.rom')}`, '0361b321'], [`best.gz|best.rom -> ${path.join('..', 'input', 'roms', 'patchable', 'best.gz')}|best.rom`, '1e3d78cf'], [`C01173E.rom -> ${path.join('..', 'input', 'roms', 'patchable', 'C01173E.rom')}`, 'dfaebe28'], + [`CD-ROM.chd|CD-ROM (Track 1).bin -> ${path.join('..', 'input', 'roms', 'chd', 'CD-ROM.chd')}|CD-ROM (Track 1).bin`, '49ca35fb'], + [`CD-ROM.chd|CD-ROM (Track 2).bin -> ${path.join('..', 'input', 'roms', 'chd', 'CD-ROM.chd')}|CD-ROM (Track 2).bin`, '0316f720'], + [`CD-ROM.chd|CD-ROM (Track 3).bin -> ${path.join('..', 'input', 'roms', 'chd', 'CD-ROM.chd')}|CD-ROM (Track 3).bin`, 'a320af40'], + [`CD-ROM.chd|CD-ROM.cue -> ${path.join('..', 'input', 'roms', 'chd', 'CD-ROM.chd')}|CD-ROM.cue`, 'xxxxxxxx'], [`color_test.nes -> ${path.join('..', 'input', 'roms', 'headered', 'color_test.nintendoentertainmentsystem')}`, 'c9c1b7aa'], [`diagnostic_test_cartridge.a78.7z|diagnostic_test_cartridge.a78 -> ${path.join('..', 'input', 'roms', 'headered', 
'diagnostic_test_cartridge.a78.7z')}|diagnostic_test_cartridge.a78`, 'f6cc9b1c'], [`empty.rom -> ${path.join('..', 'input', 'roms', 'empty.rom')}`, '00000000'], @@ -1057,6 +1220,11 @@ describe('with inferred DATs', () => { [`four.rom -> ${path.join('..', 'input', 'roms', 'raw', 'four.rom')}`, '1cf3ca74'], [`fourfive.zip|five.rom -> ${path.join('..', 'input', 'roms', 'zip', 'fourfive.zip')}|five.rom`, '3e5daf67'], [`fourfive.zip|four.rom -> ${path.join('..', 'input', 'roms', 'zip', 'fourfive.zip')}|four.rom`, '1cf3ca74'], + [`GD-ROM.chd|GD-ROM.gdi -> ${path.join('..', 'input', 'roms', 'chd', 'GD-ROM.chd|GD-ROM.gdi')}`, 'f16f621c'], + [`GD-ROM.chd|track01.bin -> ${path.join('..', 'input', 'roms', 'chd', 'GD-ROM.chd|track01.bin')}`, '9796ed9a'], + [`GD-ROM.chd|track02.raw -> ${path.join('..', 'input', 'roms', 'chd', 'GD-ROM.chd|track02.raw')}`, 'abc178d5'], + [`GD-ROM.chd|track03.bin -> ${path.join('..', 'input', 'roms', 'chd', 'GD-ROM.chd|track03.bin')}`, '61a363f1'], + [`GD-ROM.chd|track04.bin -> ${path.join('..', 'input', 'roms', 'chd', 'GD-ROM.chd|track04.bin')}`, 'fc5ff5a0'], [`KDULVQN.rom -> ${path.join('..', 'input', 'roms', 'patchable', 'KDULVQN.rom')}`, 'b1c303e4'], [`LCDTestROM.lnx.rar|LCDTestROM.lnx -> ${path.join('..', 'input', 'roms', 'headered', 'LCDTestROM.lnx.rar')}|LCDTestROM.lnx`, '2d251538'], [`loremipsum.rom -> ${path.join('..', 'input', 'roms', 'raw', 'loremipsum.rom')}`, '70856527'], @@ -1068,6 +1236,7 @@ describe('with inferred DATs', () => { [`speed_test_v51.smc -> ${path.join('..', 'input', 'roms', 'headered', 'speed_test_v51.smc')}`, '9adca6cc'], [`three.rom -> ${path.join('..', 'input', 'roms', 'raw', 'three.rom')}`, 'ff46c5d8'], [`two.rom -> ${path.join('..', 'input', 'roms', 'raw', 'two.rom')}`, '96170874'], + [`UMD.cso|UMD.iso -> ${path.join('..', 'input', 'roms', 'cso', 'UMD.cso')}|UMD.iso`, 'e90f7cf5'], [`unknown.rom -> ${path.join('..', 'input', 'roms', 'raw', 'unknown.rom')}`, '377a7727'], ]); 
expect(result.cwdFilesAndCrcs).toHaveLength(0); @@ -1112,6 +1281,11 @@ describe('with inferred DATs', () => { const result = await runIgir({ commands: ['dir2dat'], input: [path.join(inputTemp, 'roms')], + inputExclude: [ + // Note: need to exclude some ROMs to prevent duplicate output paths + path.join(inputTemp, 'roms', 'discs'), // de-conflict chd & discs + path.join(inputTemp, 'roms', 'nkit'), // will throw an error, preventing everything + ], output: outputTemp, dirDatName: true, fixExtension: FixExtension[FixExtension.AUTO].toLowerCase(), @@ -1132,21 +1306,31 @@ describe('with inferred DATs', () => { const dats = await new DATScanner( new Options({ dat: writtenDir2Dats }), new ProgressBarFake(), + new FileFactory(new FileCache()), ).scan(); expect(dats).toHaveLength(1); const roms = dats[0].getGames() .flatMap((game) => game.getRoms()) .map((rom) => rom.getName()) + .reduce(ArrayPoly.reduceUnique(), []) .sort(); expect(roms).toEqual([ '0F09A40.rom', + '2048.rom', '3708F2C.rom', + '4096.rom', '612644F.rom', '65D1206.rom', '92C85C9.rom', 'C01173E.rom', + 'CD-ROM (Track 1).bin', + 'CD-ROM (Track 2).bin', + 'CD-ROM (Track 3).bin', + 'CD-ROM.cue', + 'GD-ROM.gdi', 'KDULVQN.rom', 'LCDTestROM.lnx', + 'UMD.iso', 'allpads.nes', 'before.rom', 'best.rom', @@ -1154,19 +1338,18 @@ describe('with inferred DATs', () => { 'diagnostic_test_cartridge.a78', 'fds_joypad_test.fds', 'five.rom', - 'five.rom', 'fizzbuzz.nes', 'foobar.lnx', 'four.rom', - 'four.rom', 'loremipsum.rom', 'one.rom', - 'one.rom', 'speed_test_v51.sfc', 'speed_test_v51.smc', 'three.rom', - 'three.rom', - 'two.rom', + 'track01.bin', + 'track02.raw', + 'track03.bin', + 'track04.bin', 'two.rom', 'unknown.rom', ]); diff --git a/test/modules/argumentsParser.test.ts b/test/modules/argumentsParser.test.ts index f5a0f5b65..e331f1ba1 100644 --- a/test/modules/argumentsParser.test.ts +++ b/test/modules/argumentsParser.test.ts @@ -6,14 +6,13 @@ import LogLevel from '../../src/console/logLevel.js'; import Temp from 
'../../src/globals/temp.js'; import ArgumentsParser from '../../src/modules/argumentsParser.js'; import FsPoly from '../../src/polyfill/fsPoly.js'; -import Header from '../../src/types/dats/logiqx/header.js'; -import LogiqxDAT from '../../src/types/dats/logiqx/logiqxDat.js'; +import ROM from '../../src/types/dats/rom.js'; import { ChecksumBitmask } from '../../src/types/files/fileChecksums.js'; import { FixExtension, GameSubdirMode, InputChecksumArchivesMode, - MergeMode, + MergeMode, PreferRevision, } from '../../src/types/options.js'; const dummyRequiredArgs = ['--input', os.devNull, '--output', os.devNull]; @@ -52,7 +51,7 @@ describe('commands', () => { expect(argumentsParser.parse(['move', ...dummyRequiredArgs]).shouldCopy()).toEqual(false); expect(argumentsParser.parse(['copy', ...dummyRequiredArgs]).shouldMove()).toEqual(false); expect(argumentsParser.parse(['copy', ...dummyRequiredArgs]).shouldExtract()).toEqual(false); - expect(argumentsParser.parse(['copy', ...dummyRequiredArgs]).shouldZipFile('')).toEqual(false); + expect(argumentsParser.parse(['copy', ...dummyRequiredArgs]).shouldZipRom(new ROM({ name: '', size: 0 }))).toEqual(false); expect(argumentsParser.parse(['copy', ...dummyRequiredArgs]).shouldTest()).toEqual(false); expect(argumentsParser.parse(['copy', ...dummyRequiredArgs]).shouldDir2Dat()).toEqual(false); expect(argumentsParser.parse(['copy', ...dummyRequiredArgs]).shouldFixdat()).toEqual(false); @@ -65,7 +64,7 @@ describe('commands', () => { expect(argumentsParser.parse(datCommands).shouldCopy()).toEqual(true); expect(argumentsParser.parse(datCommands).shouldMove()).toEqual(false); expect(argumentsParser.parse(datCommands).shouldExtract()).toEqual(true); - expect(argumentsParser.parse(datCommands).shouldZipFile('')).toEqual(false); + expect(argumentsParser.parse(datCommands).shouldZipRom(new ROM({ name: '', size: 0 }))).toEqual(false); expect(argumentsParser.parse(datCommands).shouldTest()).toEqual(true); 
expect(argumentsParser.parse(datCommands).shouldDir2Dat()).toEqual(false); expect(argumentsParser.parse(datCommands).shouldFixdat()).toEqual(false); @@ -76,7 +75,7 @@ describe('commands', () => { expect(argumentsParser.parse(nonDatCommands).shouldCopy()).toEqual(true); expect(argumentsParser.parse(nonDatCommands).shouldMove()).toEqual(false); expect(argumentsParser.parse(nonDatCommands).shouldExtract()).toEqual(true); - expect(argumentsParser.parse(nonDatCommands).shouldZipFile('')).toEqual(false); + expect(argumentsParser.parse(nonDatCommands).shouldZipRom(new ROM({ name: '', size: 0 }))).toEqual(false); expect(argumentsParser.parse(nonDatCommands).shouldTest()).toEqual(true); expect(argumentsParser.parse(nonDatCommands).shouldDir2Dat()).toEqual(true); expect(argumentsParser.parse(nonDatCommands).shouldFixdat()).toEqual(false); @@ -87,7 +86,7 @@ describe('commands', () => { expect(argumentsParser.parse(moveZip).shouldCopy()).toEqual(false); expect(argumentsParser.parse(moveZip).shouldMove()).toEqual(true); expect(argumentsParser.parse(moveZip).shouldExtract()).toEqual(false); - expect(argumentsParser.parse(moveZip).shouldZipFile('')).toEqual(true); + expect(argumentsParser.parse(moveZip).shouldZipRom(new ROM({ name: '', size: 0 }))).toEqual(true); expect(argumentsParser.parse(moveZip).shouldTest()).toEqual(true); expect(argumentsParser.parse(moveZip).shouldDir2Dat()).toEqual(false); expect(argumentsParser.parse(moveZip).shouldFixdat()).toEqual(true); @@ -121,7 +120,9 @@ describe('options', () => { expect(options.shouldReport()).toEqual(false); expect(options.getInputPaths()).toEqual([os.devNull]); - expect(options.getInputMinChecksum()).toEqual(ChecksumBitmask.CRC32); + expect(options.getInputChecksumQuick()).toEqual(false); + expect(options.getInputChecksumMin()).toEqual(ChecksumBitmask.CRC32); + expect(options.getInputChecksumMax()).toBeUndefined(); expect(options.getInputChecksumArchives()).toEqual(InputChecksumArchivesMode.AUTO); 
expect(options.getDatNameRegex()).toBeUndefined(); @@ -141,9 +142,10 @@ describe('options', () => { expect(options.getDirLetterLimit()).toEqual(0); expect(options.getDirLetterGroup()).toEqual(false); expect(options.getDirGameSubdir()).toEqual(GameSubdirMode.MULTIPLE); + + expect(options.getFixExtension()).toEqual(FixExtension.AUTO); expect(options.getOverwrite()).toEqual(false); expect(options.getOverwriteInvalid()).toEqual(false); - expect(options.getFixExtension()).toEqual(FixExtension.AUTO); expect(options.getCleanBackup()).toBeUndefined(); expect(options.getCleanDryRun()).toEqual(false); @@ -154,6 +156,9 @@ describe('options', () => { expect(options.getSymlinkRelative()).toEqual(false); expect(options.getMergeRoms()).toEqual(MergeMode.FULLNONMERGED); + expect(options.getExcludeDisks()).toEqual(false); + expect(options.getAllowExcessSets()).toEqual(false); + expect(options.getAllowIncompleteSets()).toEqual(false); expect(options.getFilterRegex()).toBeUndefined(); expect(options.getFilterRegexExclude()).toBeUndefined(); @@ -194,11 +199,8 @@ describe('options', () => { expect(options.getPreferGood()).toEqual(false); expect(options.getPreferLanguages()).toHaveLength(0); expect(options.getPreferRegions()).toHaveLength(0); - expect(options.getPreferRevisionNewer()).toEqual(false); - expect(options.getPreferRevisionOlder()).toEqual(false); + expect(options.getPreferRevision()).toBeUndefined(); expect(options.getPreferRetail()).toEqual(false); - expect(options.getPreferNTSC()).toEqual(false); - expect(options.getPreferPAL()).toEqual(false); expect(options.getPreferParent()).toEqual(false); expect(options.getDatThreads()).toEqual(3); @@ -210,15 +212,6 @@ describe('options', () => { expect(options.getHelp()).toEqual(false); }); - it('should parse "fixdat"', () => { - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--fixdat']).shouldFixdat()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', 
os.devNull, '--fixdat', 'true']).shouldFixdat()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--fixdat', 'false']).shouldFixdat()).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--fixdat', '--fixdat']).shouldFixdat()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--fixdat', 'false', '--fixdat', 'true']).shouldFixdat()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--fixdat', 'true', '--fixdat', 'false']).shouldFixdat()).toEqual(false); - }); - it('should parse "input"', async () => { expect(() => argumentsParser.parse(['copy', '--output', os.devNull])).toThrow(/missing required argument/i); expect(() => argumentsParser.parse(['move', '--output', os.devNull])).toThrow(/missing required argument/i); @@ -248,15 +241,40 @@ describe('options', () => { expect((await argumentsParser.parse(['copy', '--input', './src', '--output', os.devNull, '--input-exclude', './src']).scanInputFilesWithoutExclusions()).length).toEqual(0); }); - it('should parse "input-min-checksum', () => { - expect(argumentsParser.parse(dummyCommandAndRequiredArgs).getInputMinChecksum()) - .toEqual(ChecksumBitmask.CRC32); - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-min-checksum', 'foobar']).getInputMinChecksum()).toThrow(/invalid values/i); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-min-checksum', 'CRC32']).getInputMinChecksum()).toEqual(ChecksumBitmask.CRC32); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-min-checksum', 'MD5']).getInputMinChecksum()).toEqual(ChecksumBitmask.MD5); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-min-checksum', 'SHA1']).getInputMinChecksum()).toEqual(ChecksumBitmask.SHA1); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 
'--input-min-checksum', 'SHA256']).getInputMinChecksum()).toEqual(ChecksumBitmask.SHA256); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-min-checksum', 'SHA256', '--input-min-checksum', 'CRC32']).getInputMinChecksum()).toEqual(ChecksumBitmask.CRC32); + it('should parse "input-checksum-quick"', () => { + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-quick', '--input-checksum-min', 'MD5'])).toThrow(/mutually exclusive/i); + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-quick', '--input-checksum-min', 'SHA1'])).toThrow(/mutually exclusive/i); + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-quick', '--input-checksum-min', 'SHA256'])).toThrow(/mutually exclusive/i); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-quick']).getInputChecksumQuick()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-quick', 'true']).getInputChecksumQuick()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-quick', 'false']).getInputChecksumQuick()).toEqual(false); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-quick', '--input-checksum-quick']).getInputChecksumQuick()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-quick', 'false', '--input-checksum-quick', 'true']).getInputChecksumQuick()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-quick', 'true', '--input-checksum-quick', 'false']).getInputChecksumQuick()).toEqual(false); + }); + + it('should parse "input-checksum-min', () => { + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-min', 'foobar']).getInputChecksumMin()).toThrow(/invalid values/i); + expect(() => 
argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-min', 'MD5', '--input-checksum-max', 'CRC32']).getInputChecksumMin()).toThrow(/min.+max/i); + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-min', 'SHA1', '--input-checksum-max', 'CRC32']).getInputChecksumMin()).toThrow(/min.+max/i); + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-min', 'SHA256', '--input-checksum-max', 'CRC32']).getInputChecksumMin()).toThrow(/min.+max/i); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-min', 'CRC32']).getInputChecksumMin()).toEqual(ChecksumBitmask.CRC32); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-min', 'MD5']).getInputChecksumMin()).toEqual(ChecksumBitmask.MD5); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-min', 'SHA1']).getInputChecksumMin()).toEqual(ChecksumBitmask.SHA1); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-min', 'SHA256']).getInputChecksumMin()).toEqual(ChecksumBitmask.SHA256); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-min', 'SHA256', '--input-checksum-min', 'CRC32']).getInputChecksumMin()).toEqual(ChecksumBitmask.CRC32); + }); + + it('should parse "input-checksum-max', () => { + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-max', 'foobar']).getInputChecksumMax()).toThrow(/invalid values/i); + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-min', 'SHA256', '--input-checksum-max', 'CRC32']).getInputChecksumMax()).toThrow(/min.+max/i); + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-min', 'SHA256', '--input-checksum-max', 'MD5']).getInputChecksumMax()).toThrow(/min.+max/i); + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-min', 
'SHA256', '--input-checksum-max', 'SHA1']).getInputChecksumMax()).toThrow(/min.+max/i); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-max', 'CRC32']).getInputChecksumMax()).toEqual(ChecksumBitmask.CRC32); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-max', 'MD5']).getInputChecksumMax()).toEqual(ChecksumBitmask.MD5); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-max', 'SHA1']).getInputChecksumMax()).toEqual(ChecksumBitmask.SHA1); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-max', 'SHA256']).getInputChecksumMax()).toEqual(ChecksumBitmask.SHA256); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--input-checksum-max', 'SHA256', '--input-checksum-max', 'CRC32']).getInputChecksumMax()).toEqual(ChecksumBitmask.CRC32); }); it('should parse "input-checksum-archives"', () => { @@ -302,10 +320,6 @@ describe('options', () => { expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat-name-regex', '[a-z]']).getDatNameRegex()?.some((regex) => regex.test('UPPER'))).toEqual(false); expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat-name-regex', '/[a-z]/i']).getDatNameRegex()?.some((regex) => regex.test('UPPER'))).toEqual(true); expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat-name-regex', '/[a-z]/i', '--dat-name-regex', '[0-9]']).getDatNameRegex()?.some((regex) => regex.test('UPPER'))).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat-regex', '[a-z]']).getDatNameRegex()?.some((regex) => regex.test('lower'))).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat-regex', '[a-z]']).getDatNameRegex()?.some((regex) => regex.test('UPPER'))).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat-regex', '/[a-z]/i']).getDatNameRegex()?.some((regex) => 
regex.test('UPPER'))).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat-regex', '/[a-z]/i', '--dat-regex', '[0-9]']).getDatNameRegex()?.some((regex) => regex.test('UPPER'))).toEqual(false); const tempFile = await FsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); await FsPoly.mkdir(path.dirname(tempFile), { recursive: true }); @@ -328,10 +342,6 @@ describe('options', () => { expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat-name-regex-exclude', '[a-z]']).getDatNameRegexExclude()?.some((regex) => regex.test('UPPER'))).toEqual(false); expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat-name-regex-exclude', '/[a-z]/i']).getDatNameRegexExclude()?.some((regex) => regex.test('UPPER'))).toEqual(true); expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat-name-regex-exclude', '/[a-z]/i', '--dat-name-regex-exclude', '[0-9]']).getDatNameRegexExclude()?.some((regex) => regex.test('UPPER'))).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat-regex-exclude', '[a-z]']).getDatNameRegexExclude()?.some((regex) => regex.test('lower'))).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat-regex-exclude', '[a-z]']).getDatNameRegexExclude()?.some((regex) => regex.test('UPPER'))).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat-regex-exclude', '/[a-z]/i']).getDatNameRegexExclude()?.some((regex) => regex.test('UPPER'))).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat-regex-exclude', '/[a-z]/i', '--dat-regex-exclude', '[0-9]']).getDatNameRegexExclude()?.some((regex) => regex.test('UPPER'))).toEqual(false); const tempFile = await FsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); await FsPoly.mkdir(path.dirname(tempFile), { recursive: true }); @@ -537,16 +547,6 @@ describe('options', () => { expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 
'--fix-extension', 'always', '--fix-extension', 'never']).getFixExtension()).toEqual(FixExtension.NEVER); }); - it('should parse "single"', () => { - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '-s']).getSingle()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--single']).getSingle()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--single', 'true']).getSingle()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--single', 'false']).getSingle()).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--single', '--single']).getSingle()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--single', 'false', '--single', 'true']).getSingle()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--single', 'true', '--single', 'false']).getSingle()).toEqual(false); - }); - it('should parse "overwrite"', () => { expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '-O']).getOverwrite()).toEqual(true); expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--overwrite']).getOverwrite()).toEqual(true); @@ -595,12 +595,12 @@ describe('options', () => { }); it('should parse "zip-exclude"', () => { - const filePath = 'roms/test.rom'; - expect(argumentsParser.parse(['copy', 'zip', '--input', os.devNull, '--output', os.devNull]).shouldZipFile(filePath)).toEqual(true); - expect(argumentsParser.parse(['copy', 'zip', '--input', os.devNull, '--output', os.devNull, '-Z', os.devNull]).shouldZipFile(filePath)).toEqual(true); - expect(argumentsParser.parse(['copy', 'zip', '--input', os.devNull, '--output', os.devNull, '-Z', '**/*']).shouldZipFile(filePath)).toEqual(false); - expect(argumentsParser.parse(['copy', 'zip', '--input', 
os.devNull, '--output', os.devNull, '-Z', '**/*.rom']).shouldZipFile(filePath)).toEqual(false); - expect(argumentsParser.parse(['copy', 'zip', '--input', os.devNull, '--output', os.devNull, '--zip-exclude', '**/*.rom']).shouldZipFile(filePath)).toEqual(false); + const rom = new ROM({ name: 'roms/test.rom', size: 0 }); + expect(argumentsParser.parse(['copy', 'zip', '--input', os.devNull, '--output', os.devNull]).shouldZipRom(rom)).toEqual(true); + expect(argumentsParser.parse(['copy', 'zip', '--input', os.devNull, '--output', os.devNull, '-Z', os.devNull]).shouldZipRom(rom)).toEqual(true); + expect(argumentsParser.parse(['copy', 'zip', '--input', os.devNull, '--output', os.devNull, '-Z', '**/*']).shouldZipRom(rom)).toEqual(false); + expect(argumentsParser.parse(['copy', 'zip', '--input', os.devNull, '--output', os.devNull, '-Z', '**/*.rom']).shouldZipRom(rom)).toEqual(false); + expect(argumentsParser.parse(['copy', 'zip', '--input', os.devNull, '--output', os.devNull, '--zip-exclude', '**/*.rom']).shouldZipRom(rom)).toEqual(false); }); it('should parse "zip-dat-name"', () => { @@ -614,14 +614,6 @@ describe('options', () => { }); it('should parse "symlink"', () => { - expect(argumentsParser.parse(['symlink', ...dummyRequiredArgs]).getSymlink()).toEqual(true); - expect(argumentsParser.parse(['symlink', ...dummyRequiredArgs, '--symlink']).getSymlink()).toEqual(true); - expect(argumentsParser.parse(['symlink', ...dummyRequiredArgs, '--symlink', 'true']).getSymlink()).toEqual(true); - expect(argumentsParser.parse(['symlink', ...dummyRequiredArgs, '--symlink', 'false']).getSymlink()).toEqual(false); - expect(argumentsParser.parse(['symlink', ...dummyRequiredArgs, '--symlink', '--symlink']).getSymlink()).toEqual(true); - expect(argumentsParser.parse(['symlink', ...dummyRequiredArgs, '--symlink', 'false', '--symlink', 'true']).getSymlink()).toEqual(true); - expect(argumentsParser.parse(['symlink', ...dummyRequiredArgs, '--symlink', 'true', '--symlink', 
'false']).getSymlink()).toEqual(false); - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--symlink']).getSymlink()).toThrow(/missing required command/i); expect(argumentsParser.parse(['link', ...dummyRequiredArgs]).getSymlink()).toEqual(false); expect(argumentsParser.parse(['link', ...dummyRequiredArgs, '--symlink']).getSymlink()).toEqual(true); @@ -633,14 +625,6 @@ describe('options', () => { }); it('should parse "symlink-relative"', () => { - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--symlink-relative'])).toThrow(/dependent|implication/i); - expect(argumentsParser.parse(['symlink', ...dummyRequiredArgs, '--symlink-relative']).getSymlinkRelative()).toEqual(true); - expect(argumentsParser.parse(['symlink', ...dummyRequiredArgs, '--symlink-relative', 'true']).getSymlinkRelative()).toEqual(true); - expect(argumentsParser.parse(['symlink', ...dummyRequiredArgs, '--symlink-relative', 'false']).getSymlinkRelative()).toEqual(false); - expect(argumentsParser.parse(['symlink', ...dummyRequiredArgs, '--symlink-relative', '--symlink-relative']).getSymlinkRelative()).toEqual(true); - expect(argumentsParser.parse(['symlink', ...dummyRequiredArgs, '--symlink-relative', 'false', '--symlink-relative', 'true']).getSymlinkRelative()).toEqual(true); - expect(argumentsParser.parse(['symlink', ...dummyRequiredArgs, '--symlink-relative', 'true', '--symlink-relative', 'false']).getSymlinkRelative()).toEqual(false); - expect(() => argumentsParser.parse(['link', ...dummyRequiredArgs, '--symlink-relative'])).toThrow(/dependent|implication/i); expect(argumentsParser.parse(['link', ...dummyRequiredArgs, '--symlink', '--symlink-relative']).getSymlinkRelative()).toEqual(true); expect(argumentsParser.parse(['link', ...dummyRequiredArgs, '--symlink', '--symlink-relative', 'true']).getSymlinkRelative()).toEqual(true); @@ -656,22 +640,30 @@ describe('options', () => { }); it('should parse "remove-headers"', () => { - const dat = new LogiqxDAT(new 
Header(), []); - // False - expect(argumentsParser.parse(dummyCommandAndRequiredArgs).canRemoveHeader(dat, '.smc')).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'extract', '--remove-headers', '.smc']).canRemoveHeader(dat, '.rom')).toEqual(false); + expect(argumentsParser.parse(dummyCommandAndRequiredArgs).canRemoveHeader('.smc')).toEqual(false); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'extract', '--remove-headers', '.smc']).canRemoveHeader('.rom')).toEqual(false); // True - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'extract', '-H']).canRemoveHeader(dat, '')).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'extract', '--remove-headers']).canRemoveHeader(dat, '')).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'extract', '--remove-headers']).canRemoveHeader(dat, '.rom')).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'extract', '--remove-headers']).canRemoveHeader(dat, '.smc')).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'extract', '-H', '.smc']).canRemoveHeader(dat, 'filepath.smc')).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'zip', '--remove-headers', 'smc']).canRemoveHeader(dat, '.smc')).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'zip', '--remove-headers', '.smc']).canRemoveHeader(dat, '.SMC')).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'zip', '-H', 'LNX,.smc']).canRemoveHeader(dat, '.smc')).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'zip', '--remove-headers', 'lnx,.LNX']).canRemoveHeader(dat, '.LnX')).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'extract', '-H']).canRemoveHeader('')).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'extract', 
'--remove-headers']).canRemoveHeader('')).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'extract', '--remove-headers']).canRemoveHeader('.rom')).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'extract', '--remove-headers']).canRemoveHeader('.smc')).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'extract', '-H', '.smc']).canRemoveHeader('filepath.smc')).toEqual(false); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'zip', '--remove-headers', 'smc']).canRemoveHeader('.smc')).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'zip', '--remove-headers', '.smc']).canRemoveHeader('.SMC')).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'zip', '-H', 'LNX,.smc']).canRemoveHeader('.smc')).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 'zip', '--remove-headers', 'lnx,.LNX']).canRemoveHeader('.LnX')).toEqual(true); + }); + + it('should parse "single"', () => { + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '-s']).getSingle()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--single']).getSingle()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--single', 'true']).getSingle()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--single', 'false']).getSingle()).toEqual(false); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--single', '--single']).getSingle()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--single', 'false', '--single', 'true']).getSingle()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--single', 'true', '--single', 
'false']).getSingle()).toEqual(false); }); it('should parse "prefer-game-regex"', async () => { @@ -760,26 +752,13 @@ describe('options', () => { expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--single', '--prefer-region', 'USA,usa']).getPreferRegions()).toEqual(['USA']); }); - it('should parse "prefer-revision-newer"', () => { - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-newer', '--prefer-revision-older', '--single'])).toThrow(/mutually exclusive/i); - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-newer'])).toThrow(/dependent|implication/i); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-newer', '--single']).getPreferRevisionNewer()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-newer', 'true', '--single']).getPreferRevisionNewer()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-newer', 'false', '--single']).getPreferRevisionNewer()).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-newer', '--prefer-revision-newer', '--single']).getPreferRevisionNewer()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-newer', 'false', '--prefer-revision-newer', 'true', '--single']).getPreferRevisionNewer()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-newer', 'true', '--prefer-revision-newer', 'false', '--single']).getPreferRevisionNewer()).toEqual(false); - }); - - it('should parse "prefer-revision-older"', () => { - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-older', '--prefer-revision-newer', '--single'])).toThrow(/mutually exclusive/i); - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, 
'--prefer-revision-older'])).toThrow(/dependent|implication/i); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-older', '--single']).getPreferRevisionOlder()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-older', 'true', '--single']).getPreferRevisionOlder()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-older', 'false', '--single']).getPreferRevisionOlder()).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-older', '--prefer-revision-older', '--single']).getPreferRevisionOlder()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-older', 'false', '--prefer-revision-older', 'true', '--single']).getPreferRevisionOlder()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision-older', 'true', '--prefer-revision-older', 'false', '--single']).getPreferRevisionOlder()).toEqual(false); + it('should parse "prefer-revision"', () => { + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision', 'newer'])).toThrow(/dependent|implication/i); + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-revision', 'foobar']).getMergeRoms()).toThrow(/invalid values/i); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--single', '--prefer-revision', 'older']).getPreferRevision()).toEqual(PreferRevision.OLDER); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--single', '--prefer-revision', 'older', '--prefer-revision', 'newer']).getPreferRevision()).toEqual(PreferRevision.NEWER); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--single', '--prefer-revision', 'newer']).getPreferRevision()).toEqual(PreferRevision.NEWER); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--single', 
'--prefer-revision', 'newer', '--prefer-revision', 'older']).getPreferRevision()).toEqual(PreferRevision.OLDER); }); it('should parse "prefer-retail"', () => { @@ -792,28 +771,6 @@ describe('options', () => { expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-retail', 'true', '--prefer-retail', 'false', '--single']).getPreferRetail()).toEqual(false); }); - it('should parse "prefer-ntsc"', () => { - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-ntsc'])).toThrow(/dependent|implication/i); - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--prefer-ntsc', '--prefer-pal', '--single']).getPreferNTSC()).toThrow(/mutually exclusive/i); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--prefer-ntsc', '--single']).getPreferNTSC()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--prefer-ntsc', 'true', '--single']).getPreferNTSC()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--prefer-ntsc', 'false', '--single']).getPreferNTSC()).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--prefer-ntsc', '--prefer-ntsc', '--single']).getPreferNTSC()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--prefer-ntsc', 'false', '--prefer-ntsc', 'true', '--single']).getPreferNTSC()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--prefer-ntsc', 'true', '--prefer-ntsc', 'false', '--single']).getPreferNTSC()).toEqual(false); - }); - - it('should parse "prefer-pal"', () => { - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-pal'])).toThrow(/dependent|implication/i); - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--prefer-pal', 
'--prefer-ntsc', '--single']).getPreferPAL()).toThrow(/mutually exclusive/i); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--prefer-pal', '--single']).getPreferPAL()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--prefer-pal', 'true', '--single']).getPreferPAL()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--prefer-pal', 'false', '--single']).getPreferPAL()).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--prefer-pal', '--prefer-pal', '--single']).getPreferPAL()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--prefer-pal', 'false', '--prefer-pal', 'true', '--single']).getPreferPAL()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--prefer-pal', 'true', '--prefer-pal', 'false', '--single']).getPreferPAL()).toEqual(false); - }); - it('should parse "prefer-parent"', () => { expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-parent'])).toThrow(/dependent|implication/i); expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--prefer-parent', '--single']).getPreferParent()).toEqual(true); @@ -825,23 +782,45 @@ describe('options', () => { }); it('should parse "merge-roms"', () => { + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--merge-roms', 'merged'])).toThrow(/dependent|implication/i); expect(argumentsParser.parse(dummyCommandAndRequiredArgs).getMergeRoms()) .toEqual(MergeMode.FULLNONMERGED); expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--merge-roms', 'foobar']).getMergeRoms()).toThrow(/invalid values/i); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--merge-roms', 'fullnonmerged']).getMergeRoms()).toEqual(MergeMode.FULLNONMERGED); - 
expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--merge-roms', 'nonmerged']).getMergeRoms()).toEqual(MergeMode.NONMERGED); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--merge-roms', 'split']).getMergeRoms()).toEqual(MergeMode.SPLIT); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--merge-roms', 'merged']).getMergeRoms()).toEqual(MergeMode.MERGED); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--merge-roms', 'merged', '--merge-roms', 'split']).getMergeRoms()).toEqual(MergeMode.SPLIT); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--merge-roms', 'fullnonmerged']).getMergeRoms()).toEqual(MergeMode.FULLNONMERGED); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--merge-roms', 'nonmerged']).getMergeRoms()).toEqual(MergeMode.NONMERGED); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--merge-roms', 'split']).getMergeRoms()).toEqual(MergeMode.SPLIT); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--merge-roms', 'merged']).getMergeRoms()).toEqual(MergeMode.MERGED); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--merge-roms', 'merged', '--merge-roms', 'split']).getMergeRoms()).toEqual(MergeMode.SPLIT); + }); + + it('should parse "exclude-disks"', () => { + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--exclude-disks'])).toThrow(/dependent|implication/i); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--exclude-disks']).getExcludeDisks()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--exclude-disks', 'true']).getExcludeDisks()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--exclude-disks', 'false']).getExcludeDisks()).toEqual(false); + 
expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--exclude-disks', '--exclude-disks']).getExcludeDisks()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--exclude-disks', 'false', '--exclude-disks', 'true']).getExcludeDisks()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--exclude-disks', 'true', '--exclude-disks', 'false']).getExcludeDisks()).toEqual(false); + }); + + it('should parse "allow-excess-sets"', () => { + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--allow-excess-sets'])).toThrow(/dependent|implication/i); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--allow-excess-sets']).getAllowExcessSets()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--allow-excess-sets', 'true']).getAllowExcessSets()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--allow-excess-sets', 'false']).getAllowExcessSets()).toEqual(false); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--allow-excess-sets', '--allow-excess-sets']).getAllowExcessSets()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--allow-excess-sets', 'false', '--allow-excess-sets', 'true']).getAllowExcessSets()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--allow-excess-sets', 'true', '--allow-excess-sets', 'false']).getAllowExcessSets()).toEqual(false); }); it('should parse "allow-incomplete-sets"', () => { - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--allow-incomplete-sets']).getAllowIncompleteSets()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--allow-incomplete-sets', 
'true']).getAllowIncompleteSets()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--allow-incomplete-sets', 'false']).getAllowIncompleteSets()).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--allow-incomplete-sets', '--allow-incomplete-sets']).getAllowIncompleteSets()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--allow-incomplete-sets', 'false', '--allow-incomplete-sets', 'true']).getAllowIncompleteSets()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--allow-incomplete-sets', 'true', '--allow-incomplete-sets', 'false']).getAllowIncompleteSets()).toEqual(false); + expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--allow-incomplete-sets'])).toThrow(/dependent|implication/i); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--allow-incomplete-sets']).getAllowIncompleteSets()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--allow-incomplete-sets', 'true']).getAllowIncompleteSets()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--allow-incomplete-sets', 'false']).getAllowIncompleteSets()).toEqual(false); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--allow-incomplete-sets', '--allow-incomplete-sets']).getAllowIncompleteSets()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--allow-incomplete-sets', 'false', '--allow-incomplete-sets', 'true']).getAllowIncompleteSets()).toEqual(true); + expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--dat', os.devNull, '--allow-incomplete-sets', 'true', '--allow-incomplete-sets', 'false']).getAllowIncompleteSets()).toEqual(false); }); it('should parse "filter-regex"', async () => { @@ -896,13 +875,6 @@ describe('options', () => 
{ expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--filter-language', 'EN,it']).getFilterLanguage()).toEqual(new Set(['EN', 'IT'])); expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--filter-language', 'en,IT,JA']).getFilterLanguage()).toEqual(new Set(['EN', 'IT', 'JA'])); expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--filter-language', 'EN,en']).getFilterLanguage()).toEqual(new Set(['EN'])); - - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--language-filter'])).toThrow(/not enough arguments/i); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '-L', 'EN']).getFilterLanguage()).toEqual(new Set(['EN'])); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--language-filter', 'EN']).getFilterLanguage()).toEqual(new Set(['EN'])); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--language-filter', 'EN,it']).getFilterLanguage()).toEqual(new Set(['EN', 'IT'])); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--language-filter', 'en,IT,JA']).getFilterLanguage()).toEqual(new Set(['EN', 'IT', 'JA'])); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--language-filter', 'EN,en']).getFilterLanguage()).toEqual(new Set(['EN'])); }); it('should parse "filter-region"', () => { @@ -914,13 +886,6 @@ describe('options', () => { expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--filter-region', 'USA,eur']).getFilterRegion()).toEqual(new Set(['USA', 'EUR'])); expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--filter-region', 'usa,EUR,JPN']).getFilterRegion()).toEqual(new Set(['USA', 'EUR', 'JPN'])); expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--filter-region', 'USA,usa']).getFilterRegion()).toEqual(new Set(['USA'])); - - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--region-filter'])).toThrow(/not enough arguments/i); - 
expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '-R', 'USA']).getFilterRegion()).toEqual(new Set(['USA'])); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--region-filter', 'USA']).getFilterRegion()).toEqual(new Set(['USA'])); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--region-filter', 'USA,eur']).getFilterRegion()).toEqual(new Set(['USA', 'EUR'])); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--region-filter', 'usa,EUR,JPN']).getFilterRegion()).toEqual(new Set(['USA', 'EUR', 'JPN'])); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--region-filter', 'USA,usa']).getFilterRegion()).toEqual(new Set(['USA'])); }); it('should parse "no-bios"', () => { @@ -1093,14 +1058,6 @@ describe('options', () => { }); it('should parse "no-program-roms"', () => { - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--no-test-roms', '--only-test-roms'])).toThrow(/mutually exclusive/i); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--no-test-roms']).getNoProgram()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--no-test-roms', 'true']).getNoProgram()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--no-test-roms', 'false']).getNoProgram()).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--no-test-roms', '--no-test-roms']).getNoProgram()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--no-test-roms', 'false', '--no-test-roms', 'true']).getNoProgram()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--no-test-roms', 'true', '--no-test-roms', 'false']).getNoProgram()).toEqual(false); - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--no-program', '--only-program'])).toThrow(/mutually exclusive/i); expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, 
'--no-program']).getNoProgram()).toEqual(true); expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--no-program', 'true']).getNoProgram()).toEqual(true); @@ -1111,14 +1068,6 @@ describe('options', () => { }); it('should parse "only-program"', () => { - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--only-test-roms', '--no-test-roms'])).toThrow(/mutually exclusive/i); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--only-test-roms']).getOnlyProgram()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--only-test-roms', 'true']).getOnlyProgram()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--only-test-roms', 'false']).getOnlyProgram()).toEqual(false); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--only-test-roms', '--only-test-roms']).getOnlyProgram()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--only-test-roms', 'false', '--only-test-roms', 'true']).getOnlyProgram()).toEqual(true); - expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--only-test-roms', 'true', '--only-test-roms', 'false']).getOnlyProgram()).toEqual(false); - expect(() => argumentsParser.parse([...dummyCommandAndRequiredArgs, '--only-program', '--no-program'])).toThrow(/mutually exclusive/i); expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--only-program']).getOnlyProgram()).toEqual(true); expect(argumentsParser.parse([...dummyCommandAndRequiredArgs, '--only-program', 'true']).getOnlyProgram()).toEqual(true); diff --git a/test/modules/candidateCombiner.test.ts b/test/modules/candidateCombiner.test.ts index fb6957b8d..2eb04d8c9 100644 --- a/test/modules/candidateCombiner.test.ts +++ b/test/modules/candidateCombiner.test.ts @@ -8,6 +8,8 @@ import ROMIndexer from '../../src/modules/romIndexer.js'; import ROMScanner from '../../src/modules/romScanner.js'; import Parent from 
'../../src/types/dats/parent.js'; import File from '../../src/types/files/file.js'; +import FileCache from '../../src/types/files/fileCache.js'; +import FileFactory from '../../src/types/files/fileFactory.js'; import Options from '../../src/types/options.js'; import ReleaseCandidate from '../../src/types/releaseCandidate.js'; import ProgressBarFake from '../console/progressBarFake.js'; @@ -17,10 +19,10 @@ async function runCombinedCandidateGenerator( romFiles: File[], ): Promise> { // Run DATGameInferrer, but condense all DATs down to one - const dats = new DATGameInferrer(options, new ProgressBarFake()).infer(romFiles); + const dats = await new DATGameInferrer(options, new ProgressBarFake()).infer(romFiles); const dat = new DATCombiner(new ProgressBarFake()).combine(dats); - const indexedRomFiles = await new ROMIndexer(options, new ProgressBarFake()).index(romFiles); + const indexedRomFiles = new ROMIndexer(options, new ProgressBarFake()).index(romFiles); const parentsToCandidates = await new CandidateGenerator(options, new ProgressBarFake()) .generate(dat, indexedRomFiles); @@ -33,7 +35,7 @@ it('should do nothing if option not specified', async () => { const options = new Options(); const romFiles = await new ROMScanner(new Options({ input: [path.join('test', 'fixtures', 'roms', 'raw')], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); // When const parentsToCandidates = await runCombinedCandidateGenerator(options, romFiles); @@ -59,7 +61,7 @@ it('should combine candidates', async () => { const options = new Options({ zipDatName: true }); const romFiles = await new ROMScanner(new Options({ input: [path.join('test', 'fixtures', 'roms', 'raw')], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); // When const parentsToCandidates = await runCombinedCandidateGenerator(options, romFiles); diff --git a/test/modules/candidateExtensionCorrector.test.ts 
b/test/modules/candidateExtensionCorrector.test.ts index 1c59c0ef3..4fe95b115 100644 --- a/test/modules/candidateExtensionCorrector.test.ts +++ b/test/modules/candidateExtensionCorrector.test.ts @@ -12,6 +12,8 @@ import LogiqxDAT from '../../src/types/dats/logiqx/logiqxDat.js'; import Parent from '../../src/types/dats/parent.js'; import ROM from '../../src/types/dats/rom.js'; import File from '../../src/types/files/file.js'; +import FileCache from '../../src/types/files/fileCache.js'; +import FileFactory from '../../src/types/files/fileFactory.js'; import Options, { FixExtension } from '../../src/types/options.js'; import ReleaseCandidate from '../../src/types/releaseCandidate.js'; import ROMWithFiles from '../../src/types/romWithFiles.js'; @@ -25,6 +27,7 @@ it('should do nothing with no parents', async () => { const correctedParentsToCandidates = await new CandidateExtensionCorrector( options, new ProgressBarFake(), + new FileFactory(new FileCache()), ).correct(dat, parentsToCandidates); expect(correctedParentsToCandidates).toBe(parentsToCandidates); @@ -55,6 +58,7 @@ it('should do nothing when no ROMs need correcting', async () => { const correctedParentsToCandidates = await new CandidateExtensionCorrector( options, new ProgressBarFake(), + new FileFactory(new FileCache()), ).correct(dat, parentsToCandidates); expect(correctedParentsToCandidates).toBe(parentsToCandidates); @@ -108,7 +112,11 @@ it('should correct ROMs without DATs', async () => { fixExtension: FixExtension[FixExtension.AUTO].toLowerCase(), }); const dat = new LogiqxDAT(new Header(), []); - const inputFiles = await new ROMScanner(options, new ProgressBarFake()).scan(); + const inputFiles = await new ROMScanner( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan(); const tempDir = await FsPoly.mkdtemp(Temp.getTempDir()); try { @@ -141,6 +149,7 @@ it('should correct ROMs without DATs', async () => { const correctedParentsToCandidates = await new 
CandidateExtensionCorrector( options, new ProgressBarFake(), + new FileFactory(new FileCache()), ).correct(dat, parentsToCandidates); expectCorrectedCandidates(parentsToCandidates, correctedParentsToCandidates); @@ -156,7 +165,11 @@ it('should correct ROMs with missing filenames', async () => { fixExtension: FixExtension[FixExtension.AUTO].toLowerCase(), }); const dat = new LogiqxDAT(new Header(), []); - const inputFiles = await new ROMScanner(options, new ProgressBarFake()).scan(); + const inputFiles = await new ROMScanner( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan(); const tempDir = await FsPoly.mkdtemp(Temp.getTempDir()); try { @@ -187,6 +200,7 @@ it('should correct ROMs with missing filenames', async () => { const correctedParentsToCandidates = await new CandidateExtensionCorrector( options, new ProgressBarFake(), + new FileFactory(new FileCache()), ).correct(dat, parentsToCandidates); expectCorrectedCandidates(parentsToCandidates, correctedParentsToCandidates); diff --git a/test/modules/candidateGenerator.test.ts b/test/modules/candidateGenerator.test.ts index ff117cca8..e799b3733 100644 --- a/test/modules/candidateGenerator.test.ts +++ b/test/modules/candidateGenerator.test.ts @@ -2,20 +2,25 @@ import path from 'node:path'; import CandidateGenerator from '../../src/modules/candidateGenerator.js'; import ROMIndexer from '../../src/modules/romIndexer.js'; +import ArrayPoly from '../../src/polyfill/arrayPoly.js'; import DAT from '../../src/types/dats/dat.js'; +import Disk from '../../src/types/dats/disk.js'; import Game from '../../src/types/dats/game.js'; import Header from '../../src/types/dats/logiqx/header.js'; import LogiqxDAT from '../../src/types/dats/logiqx/logiqxDat.js'; +import Machine from '../../src/types/dats/mame/machine.js'; +import MameDAT from '../../src/types/dats/mame/mameDat.js'; import Parent from '../../src/types/dats/parent.js'; import Release from '../../src/types/dats/release.js'; import ROM from 
'../../src/types/dats/rom.js'; import ArchiveEntry from '../../src/types/files/archives/archiveEntry.js'; import Rar from '../../src/types/files/archives/rar.js'; -import SevenZip from '../../src/types/files/archives/sevenZip.js'; +import SevenZip from '../../src/types/files/archives/sevenZip/sevenZip.js'; import Tar from '../../src/types/files/archives/tar.js'; import Zip from '../../src/types/files/archives/zip.js'; import File from '../../src/types/files/file.js'; import ROMHeader from '../../src/types/files/romHeader.js'; +import IndexedFiles from '../../src/types/indexedFiles.js'; import Options, { GameSubdirMode } from '../../src/types/options.js'; import ReleaseCandidate from '../../src/types/releaseCandidate.js'; import ProgressBarFake from '../console/progressBarFake.js'; @@ -72,7 +77,7 @@ async function candidateGenerator( files: (File | Promise)[], ): Promise> { const resolvedFiles = await Promise.all(files); - const indexedFiles = await new ROMIndexer(options, new ProgressBarFake()).index(resolvedFiles); + const indexedFiles = new ROMIndexer(options, new ProgressBarFake()).index(resolvedFiles); return new CandidateGenerator(options, new ProgressBarFake()).generate(dat, indexedFiles); } @@ -385,14 +390,7 @@ describe('with ROMs with headers', () => { }); const candidateWithTwoRoms = candidates[2]; - expect(candidateWithTwoRoms).toHaveLength(1); - candidateWithTwoRoms.forEach((candidate) => { - expect(candidate.getRomsWithFiles()).toHaveLength(2); - const candidateWithTwoRomsOutputOne = candidate.getRomsWithFiles()[0].getOutputFile(); - expect(candidateWithTwoRomsOutputOne.getFilePath()).toEqual('game with two ROMs (parent).7z'); // respected DAT and input extension - const candidateWithTwoRomsOutputTwo = candidate.getRomsWithFiles()[1].getOutputFile(); - expect(candidateWithTwoRomsOutputTwo.getFilePath()).toEqual('game with two ROMs (parent).7z'); // respected DAT and input extension - }); + expect(candidateWithTwoRoms).toHaveLength(0); // can't remove 
headers & leave the archive as-is }); }); @@ -480,83 +478,80 @@ describe('with different input files for every game ROM', () => { }); }); -describe.each(['copy', 'move'])('prefer input files from the same archive when raw writing: %s', (command) => { +describe.each(['copy', 'move'])('raw writing: %s', (command) => { const options = new Options({ commands: [command] }); - it('should behave like normal with only one ROM', async () => { - // Given - const datGame = gameWithOneRom; - expect(datGame.getRoms()).toHaveLength(1); - const dat = new LogiqxDAT(new Header(), [datGame]); - - // And every file is present, both raw and archived - const rawFiles = await Promise.all(dat.getGames() - .flatMap((game) => game.getRoms()) - .map(async (rom) => rom.toFile())); - const archive = new Zip('archive.zip'); - const archiveEntries = await Promise.all(dat.getGames() - .flatMap((game) => game.getRoms()) - .map(async (rom) => rom.toArchiveEntry(archive))); + describe('allow excess sets', () => { + const archive = new Zip('input.zip'); const files = [ - ...rawFiles, - ...archiveEntries, + // Matches a game with two ROMs + File.fileOf({ filePath: 'two.a', size: 2, crc32: 'abcdef90' }), + ArchiveEntry.entryOf({ + archive, + entryPath: 'two.b', + size: 3, + crc32: '09876543', + }), + // Excess file + ArchiveEntry.entryOf({ + archive, + entryPath: 'ninetynine.rom', + size: 9, + crc32: '99999999', + }), ]; - // When - const parentsToCandidates = await candidateGenerator(options, dat, files); - - // Then the Archive isn't used for any input file - expect(parentsToCandidates.size).toEqual(1); - const candidates = [...parentsToCandidates.values()]; - expect(candidates[0]).toHaveLength(Math.max(datGame.getReleases().length, 1)); - - for (const candidate of candidates) { - const firstCandidate = candidate[0]; - const romsWithFiles = firstCandidate.getRomsWithFiles(); - expect(romsWithFiles).toHaveLength(datGame.getRoms().length); - - for (const [idx, romsWithFile] of romsWithFiles.entries()) 
{ - const inputFile = romsWithFile.getInputFile(); - expect(inputFile.getFilePath()).toEqual(datGame.getRoms()[idx].getName()); - } - } - }); - - describe.each([ - gameWithTwoRomsParent, - gameWithTwoRomsClone, - gameWithDuplicateRoms, - ])('game: %s', (datGame) => { - const dat = new LogiqxDAT(new Header(), [datGame]); - - it('should behave like normal with no archives', async () => { - // Given every file is present, raw - const rawFiles = await Promise.all(dat.getGames() - .flatMap((game) => game.getRoms()) - .map(async (rom) => rom.toFile())); + it('should return no candidates when input files have excess files and allowExcessSets:false', async () => { + // Given + const allowExcessOptions = new Options({ + ...options, + allowExcessSets: false, + }); // When - const parentsToCandidates = await candidateGenerator(options, dat, rawFiles); + const parentsToCandidates = await candidateGenerator( + allowExcessOptions, + datWithFourGames, + files, + ); + + // Then + const candidates = [...parentsToCandidates.entries()] + .filter(([parent]) => parent.getName() === gameWithTwoRomsParent.getName()) + .flatMap(([, releaseCandidates]) => releaseCandidates); + expect(candidates).toHaveLength(0); + }); - // Then the Archive isn't used for any input file - expect(parentsToCandidates.size).toEqual(1); - const candidates = [...parentsToCandidates.values()]; - expect(candidates[0]).toHaveLength(Math.max(datGame.getReleases().length, 1)); + it('should return candidates when input files have excess files and allowExcessSets:true', async () => { + // Given + const allowExcessOptions = new Options({ + ...options, + allowExcessSets: true, + }); - for (const candidate of candidates) { - const firstCandidate = candidate[0]; - const romsWithFiles = firstCandidate.getRomsWithFiles(); - expect(romsWithFiles).toHaveLength(datGame.getRoms().length); - - for (const [idx, romsWithFile] of romsWithFiles.entries()) { - const inputFile = romsWithFile.getInputFile(); - 
expect(inputFile.getFilePath()).toEqual(datGame.getRoms()[idx].getName()); - } - } + // When + const parentsToCandidates = await candidateGenerator( + allowExcessOptions, + datWithFourGames, + files, + ); + + // Then + const candidates = [...parentsToCandidates.entries()] + .filter(([parent]) => parent.getName() === gameWithTwoRomsParent.getName()) + .flatMap(([, releaseCandidates]) => releaseCandidates); + expect(candidates).toHaveLength(1); }); + }); + + describe('prefer input files from the same archive', () => { + it('should behave like normal with only one ROM', async () => { + // Given + const datGame = gameWithOneRom; + expect(datGame.getRoms()).toHaveLength(1); + const dat = new LogiqxDAT(new Header(), [datGame]); - it('should prefer input files from the same archive if it contains exactly every ROM', async () => { - // Given every file is present, both raw and archived + // And every file is present, both raw and archived const rawFiles = await Promise.all(dat.getGames() .flatMap((game) => game.getRoms()) .map(async (rom) => rom.toFile())); @@ -572,7 +567,7 @@ describe.each(['copy', 'move'])('prefer input files from the same archive when r // When const parentsToCandidates = await candidateGenerator(options, dat, files); - // Then the Archive is used for every input file + // Then the Archive isn't used for any input file expect(parentsToCandidates.size).toEqual(1); const candidates = [...parentsToCandidates.values()]; expect(candidates[0]).toHaveLength(Math.max(datGame.getReleases().length, 1)); @@ -582,51 +577,251 @@ describe.each(['copy', 'move'])('prefer input files from the same archive when r const romsWithFiles = firstCandidate.getRomsWithFiles(); expect(romsWithFiles).toHaveLength(datGame.getRoms().length); - for (const romsWithFile of romsWithFiles) { + for (const [idx, romsWithFile] of romsWithFiles.entries()) { const inputFile = romsWithFile.getInputFile(); - expect(inputFile.getFilePath()).toEqual(archive.getFilePath()); + 
expect(inputFile.getFilePath()).toEqual(datGame.getRoms()[idx].getName()); } } }); - it('should still prefer input archives that contain extra junk files', async () => { - // Given every file is present, both raw and archived, plus extra ArchiveEntries - const rawFiles = await Promise.all(dat.getGames() - .flatMap((game) => game.getRoms()) - .map(async (rom) => rom.toFile())); - const archive = new Zip('archive.zip'); - const archiveEntries = await Promise.all(dat.getGames() - .flatMap((game) => game.getRoms()) - .map(async (rom) => rom.toArchiveEntry(archive))); - const files = [ - ...rawFiles, - ...archiveEntries, - await ArchiveEntry.entryOf({ - archive, - entryPath: 'junk.rom', - size: 999, - crc32: '55555555', - }), - ]; + describe.each([ + gameWithTwoRomsParent, + gameWithTwoRomsClone, + gameWithDuplicateRoms, + ])('game: %s', (datGame) => { + const dat = new LogiqxDAT(new Header(), [datGame]); + + it('should behave like normal with no archives', async () => { + // Given every file is present, raw + const rawFiles = await Promise.all(dat.getGames() + .flatMap((game) => game.getRoms()) + .map(async (rom) => rom.toFile())); + + // When + const parentsToCandidates = await candidateGenerator(options, dat, rawFiles); + + // Then the Archive isn't used for any input file + expect(parentsToCandidates.size).toEqual(1); + const candidates = [...parentsToCandidates.values()]; + expect(candidates[0]).toHaveLength(Math.max(datGame.getReleases().length, 1)); + + for (const candidate of candidates) { + const firstCandidate = candidate[0]; + const romsWithFiles = firstCandidate.getRomsWithFiles(); + expect(romsWithFiles).toHaveLength(datGame.getRoms().length); + + for (const [idx, romsWithFile] of romsWithFiles.entries()) { + const inputFile = romsWithFile.getInputFile(); + expect(inputFile.getFilePath()).toEqual(datGame.getRoms()[idx].getName()); + } + } + }); + + it('should prefer input files from the same archive if it contains exactly every ROM', async () => { + // 
Given every file is present, both raw and archived + const rawFiles = await Promise.all(dat.getGames() + .flatMap((game) => game.getRoms()) + .map(async (rom) => rom.toFile())); + const archive = new Zip('archive.zip'); + const archiveEntries = await Promise.all(dat.getGames() + .flatMap((game) => game.getRoms()) + .map(async (rom) => rom.toArchiveEntry(archive))); + const files = [ + ...rawFiles, + ...archiveEntries, + ]; + + // When + const parentsToCandidates = await candidateGenerator(options, dat, files); + + // Then the Archive is used for every input file + expect(parentsToCandidates.size).toEqual(1); + const candidates = [...parentsToCandidates.values()]; + expect(candidates[0]).toHaveLength(Math.max(datGame.getReleases().length, 1)); + + for (const candidate of candidates) { + const firstCandidate = candidate[0]; + const romsWithFiles = firstCandidate.getRomsWithFiles(); + expect(romsWithFiles).toHaveLength(datGame.getRoms().length); + + for (const romsWithFile of romsWithFiles) { + const inputFile = romsWithFile.getInputFile(); + expect(inputFile.getFilePath()).toEqual(archive.getFilePath()); + } + } + }); + + it('should prefer input archives that contain extra junk files when allowExcessSets:true', async () => { + const allowExcessOptions = new Options({ + ...options, + allowExcessSets: true, + }); + + // Given every file is present, both raw and archived, plus extra ArchiveEntries + const rawFiles = await Promise.all(dat.getGames() + .flatMap((game) => game.getRoms()) + .map(async (rom) => rom.toFile())); + const archive = new Zip('archive.zip'); + const archiveEntries = await Promise.all(dat.getGames() + .flatMap((game) => game.getRoms()) + .map(async (rom) => rom.toArchiveEntry(archive))); + const files = [ + ...rawFiles, + ...archiveEntries, + await ArchiveEntry.entryOf({ + archive, + entryPath: 'junk.rom', + size: 999, + crc32: '55555555', + }), + ]; + + // When + const parentsToCandidates = await candidateGenerator(allowExcessOptions, dat, files); 
+ + // Then the Archive is used for every input file + expect(parentsToCandidates.size).toEqual(1); + const candidates = [...parentsToCandidates.values()]; + expect(candidates[0]).toHaveLength(Math.max(datGame.getReleases().length, 1)); + + for (const candidate of candidates) { + const firstCandidate = candidate[0]; + const romsWithFiles = firstCandidate.getRomsWithFiles(); + expect(romsWithFiles).toHaveLength(datGame.getRoms().length); + + for (const romsWithFile of romsWithFiles) { + const inputFile = romsWithFile.getInputFile(); + expect(inputFile.getFilePath()).toEqual(archive.getFilePath()); + } + } + }); + }); + }); +}); - // When - const parentsToCandidates = await candidateGenerator(options, dat, files); +describe('MAME v0.260', () => { + const mameDat = new MameDAT([ + new Machine({ + name: '2spicy', + romOf: 'lindbios', + description: '2 Spicy', + rom: [ + new ROM({ name: '6.0.0010a.bin', size: 1_048_576, crc32: '10dd9b76' }), + new ROM({ name: '6.0.0009.bin', size: 1_048_576, crc32: '5ffdfbf8' }), + new ROM({ name: '6.0.0010.bin', size: 1_048_576, crc32: 'ea2bf888' }), + new ROM({ name: 'fpr-24370b.ic6', size: 4_194_304, crc32: 'c3b021a4' }), + new ROM({ name: 'vid_bios.u504', size: 65_536, crc32: 'f78d14d7' }), + // new ROM({ name: '317-0491-com.bin', size: 8192 }), + ], + disk: [ + new Disk({ name: 'mda-c0004a_revb_lindyellow_v2.4.20_mvl31a_boot_2.01', sha1: 'e13da5f827df852e742b594729ee3f933b387410' }), + new Disk({ name: 'dvp-0027a', sha1: 'da1aacee9e32e813844f4d434981e69cc5c80682' }), + ], + }), + new Machine({ + name: 'area51mx', + description: 'Area 51 / Maximum Force Duo v2.0', + rom: [ + new ROM({ name: '2.0_68020_max-a51_kit_3h.3h', size: 524_288, crc32: '47cbf30b' }), + new ROM({ name: '2.0_68020_max-a51_kit_3p.3p', size: 524_288, crc32: 'a3c93684' }), + new ROM({ name: '2.0_68020_max-a51_kit_3m.3m', size: 524_288, crc32: 'd800ac17' }), + new ROM({ name: '2.0_68020_max-a51_kit_3k.3k', size: 524_288, crc32: '0e78f308' }), + new ROM({ name: 
'jagwave.rom', size: 4096, crc32: '7a25ee5b' }), + ], + disk: new Disk({ name: 'area51mx', sha1: '5ff10f4e87094d4449eabf3de7549564ca568c7e' }), + }), + new Machine({ + name: 'a51mxr3k', + cloneOf: 'area51mx', + romOf: 'area51mx', + description: 'Area 51 / Maximum Force Duo (R3000, 2/10/98)', + rom: [ + new ROM({ name: '1.0_r3k_max-a51_kit_hh.hh', size: 524_288, crc32: 'a984dab2' }), + new ROM({ name: '1.0_r3k_max-a51_kit_hl.hl', size: 524_288, crc32: '0af49d74' }), + new ROM({ name: '1.0_r3k_max-a51_kit_lh.lh', size: 524_288, crc32: 'd7d94dac' }), + new ROM({ name: '1.0_r3k_max-a51_kit_ll.ll', size: 524_288, crc32: 'ece9e5ae' }), + new ROM({ name: 'jagwave.rom', size: 4096, crc32: '7a25ee5b' }), + ], + disk: new Disk({ name: 'area51mx', sha1: '5ff10f4e87094d4449eabf3de7549564ca568c7e' }), + }), + ]); - // Then the Archive is used for every input file - expect(parentsToCandidates.size).toEqual(1); - const candidates = [...parentsToCandidates.values()]; - expect(candidates[0]).toHaveLength(Math.max(datGame.getReleases().length, 1)); + const mameIndexedFiles = Promise.all( + mameDat.getGames() + .flatMap((game) => [...game.getRoms(), ...game.getDisks()]) + .map(async (rom) => rom.toFile()), + ) + .then((files) => files.filter(ArrayPoly.filterUniqueMapped((file) => file.hashCode()))) + .then((files) => IndexedFiles.fromFiles(files)); - for (const candidate of candidates) { - const firstCandidate = candidate[0]; - const romsWithFiles = firstCandidate.getRomsWithFiles(); - expect(romsWithFiles).toHaveLength(datGame.getRoms().length); + it('should include disks by default', async () => { + const options = new Options({ + commands: ['copy', 'zip'], + dirGameSubdir: GameSubdirMode[GameSubdirMode.MULTIPLE].toLowerCase(), + }); - for (const romsWithFile of romsWithFiles) { - const inputFile = romsWithFile.getInputFile(); - expect(inputFile.getFilePath()).toEqual(archive.getFilePath()); - } - } + const candidates = await new CandidateGenerator(options, new ProgressBarFake()) + 
.generate(mameDat, await mameIndexedFiles); + + const outputFiles = [...candidates.values()] + .flat() + .flatMap((releaseCandidate) => releaseCandidate.getRomsWithFiles()) + .map((romWithFiles) => romWithFiles.getOutputFile().toString()) + .sort(); + expect(outputFiles).toEqual([ + '2spicy.zip|6.0.0009.bin', + '2spicy.zip|6.0.0010.bin', + '2spicy.zip|6.0.0010a.bin', + '2spicy.zip|fpr-24370b.ic6', + '2spicy.zip|vid_bios.u504', + path.join('2spicy', 'dvp-0027a'), + path.join('2spicy', 'mda-c0004a_revb_lindyellow_v2.4.20_mvl31a_boot_2.01'), + 'a51mxr3k.zip|1.0_r3k_max-a51_kit_hh.hh', + 'a51mxr3k.zip|1.0_r3k_max-a51_kit_hl.hl', + 'a51mxr3k.zip|1.0_r3k_max-a51_kit_lh.lh', + 'a51mxr3k.zip|1.0_r3k_max-a51_kit_ll.ll', + 'a51mxr3k.zip|jagwave.rom', + path.join('a51mxr3k', 'area51mx'), + 'area51mx.zip|2.0_68020_max-a51_kit_3h.3h', + 'area51mx.zip|2.0_68020_max-a51_kit_3k.3k', + 'area51mx.zip|2.0_68020_max-a51_kit_3m.3m', + 'area51mx.zip|2.0_68020_max-a51_kit_3p.3p', + 'area51mx.zip|jagwave.rom', + path.join('area51mx', 'area51mx'), + ]); + }); + + it('should not include disks', async () => { + const options = new Options({ + commands: ['copy'], + dirGameSubdir: GameSubdirMode[GameSubdirMode.MULTIPLE].toLowerCase(), + excludeDisks: true, }); + + const candidates = await new CandidateGenerator(options, new ProgressBarFake()) + .generate(mameDat, await mameIndexedFiles); + + const outputFiles = [...candidates.values()] + .flat() + .flatMap((releaseCandidate) => releaseCandidate.getRomsWithFiles()) + .map((romWithFiles) => romWithFiles.getOutputFile().toString()) + .sort(); + expect(outputFiles).toEqual([ + path.join('2spicy', '6.0.0009.bin'), + path.join('2spicy', '6.0.0010.bin'), + path.join('2spicy', '6.0.0010a.bin'), + path.join('2spicy', 'fpr-24370b.ic6'), + path.join('2spicy', 'vid_bios.u504'), + path.join('a51mxr3k', '1.0_r3k_max-a51_kit_hh.hh'), + path.join('a51mxr3k', '1.0_r3k_max-a51_kit_hl.hl'), + path.join('a51mxr3k', '1.0_r3k_max-a51_kit_lh.lh'), + 
path.join('a51mxr3k', '1.0_r3k_max-a51_kit_ll.ll'), + path.join('a51mxr3k', 'jagwave.rom'), + path.join('area51mx', '2.0_68020_max-a51_kit_3h.3h'), + path.join('area51mx', '2.0_68020_max-a51_kit_3k.3k'), + path.join('area51mx', '2.0_68020_max-a51_kit_3m.3m'), + path.join('area51mx', '2.0_68020_max-a51_kit_3p.3p'), + path.join('area51mx', 'jagwave.rom'), + ]); }); }); diff --git a/test/modules/candidateMergeSplitValidator.test.ts b/test/modules/candidateMergeSplitValidator.test.ts index dce0d8209..5b02551bc 100644 --- a/test/modules/candidateMergeSplitValidator.test.ts +++ b/test/modules/candidateMergeSplitValidator.test.ts @@ -55,7 +55,7 @@ describe('missing parents', () => { const options = new Options({ mergeRoms }); const parentsToCandidates = await datToCandidates(dat); - const missingGames = await new CandidateMergeSplitValidator(options, new ProgressBarFake()) + const missingGames = new CandidateMergeSplitValidator(options, new ProgressBarFake()) .validate(dat, parentsToCandidates); expect(missingGames).toEqual([]); }); @@ -66,7 +66,7 @@ describe('missing parents', () => { }); const parentsToCandidates = await datToCandidates(dat); - const missingGames = await new CandidateMergeSplitValidator(options, new ProgressBarFake()) + const missingGames = new CandidateMergeSplitValidator(options, new ProgressBarFake()) .validate(dat, parentsToCandidates); expect(missingGames).toEqual(['grandparent']); }); @@ -103,7 +103,7 @@ describe('device refs', () => { .filter(([, candidate]) => candidate.some((rc) => !rc.getGame().isDevice())), ); - const missingGames = await new CandidateMergeSplitValidator(options, new ProgressBarFake()) + const missingGames = new CandidateMergeSplitValidator(options, new ProgressBarFake()) .validate(dat, parentsToCandidates); expect(missingGames).toEqual([]); }); @@ -121,7 +121,7 @@ describe('device refs', () => { .filter(([, candidate]) => candidate.some((rc) => !rc.getGame().isDevice())), ); - const missingGames = await new 
CandidateMergeSplitValidator(options, new ProgressBarFake()) + const missingGames = new CandidateMergeSplitValidator(options, new ProgressBarFake()) .validate(dat, parentsToCandidates); expect(missingGames).toEqual(['screen']); }); diff --git a/test/modules/candidatePatchGenerator.test.ts b/test/modules/candidatePatchGenerator.test.ts index f77c236c6..c8b6093f6 100644 --- a/test/modules/candidatePatchGenerator.test.ts +++ b/test/modules/candidatePatchGenerator.test.ts @@ -13,13 +13,15 @@ import Header from '../../src/types/dats/logiqx/header.js'; import LogiqxDAT from '../../src/types/dats/logiqx/logiqxDat.js'; import Parent from '../../src/types/dats/parent.js'; import File from '../../src/types/files/file.js'; +import FileCache from '../../src/types/files/fileCache.js'; +import FileFactory from '../../src/types/files/fileFactory.js'; import Options from '../../src/types/options.js'; import ReleaseCandidate from '../../src/types/releaseCandidate.js'; import ProgressBarFake from '../console/progressBarFake.js'; // Run DATGameInferrer, but condense all DATs down to one -function buildInferredDat(options: Options, romFiles: File[]): DAT { - const dats = new DATGameInferrer(options, new ProgressBarFake()).infer(romFiles); +async function buildInferredDat(options: Options, romFiles: File[]): Promise { + const dats = await new DATGameInferrer(options, new ProgressBarFake()).infer(romFiles); return new DATCombiner(new ProgressBarFake()).combine(dats); } @@ -32,11 +34,15 @@ async function runPatchCandidateGenerator( patch: [path.join('test', 'fixtures', 'patches')], }); - const indexedRomFiles = await new ROMIndexer(options, new ProgressBarFake()).index(romFiles); + const indexedRomFiles = new ROMIndexer(options, new ProgressBarFake()).index(romFiles); const parentsToCandidates = await new CandidateGenerator(options, new ProgressBarFake()) .generate(dat, indexedRomFiles); - const patches = await new PatchScanner(options, new ProgressBarFake()).scan(); + const patches = 
await new PatchScanner( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan(); return new CandidatePatchGenerator(new ProgressBarFake()) .generate(dat, parentsToCandidates, patches); @@ -59,8 +65,12 @@ describe('with inferred DATs', () => { const options = new Options({ input: [path.join('test', 'fixtures', 'roms', 'headered')], }); - const romFiles = await new ROMScanner(options, new ProgressBarFake()).scan(); - const dat = buildInferredDat(options, romFiles); + const romFiles = await new ROMScanner( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan(); + const dat = await buildInferredDat(options, romFiles); // When const parentsToCandidates = await runPatchCandidateGenerator(dat, romFiles); @@ -76,8 +86,12 @@ describe('with inferred DATs', () => { const options = new Options({ input: [path.join('test', 'fixtures', 'roms', 'patchable')], }); - const romFiles = await new ROMScanner(options, new ProgressBarFake()).scan(); - const dat = buildInferredDat(options, romFiles); + const romFiles = await new ROMScanner( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan(); + const dat = await buildInferredDat(options, romFiles); // When const parentsToCandidates = await runPatchCandidateGenerator(dat, romFiles); @@ -96,8 +110,16 @@ describe('with explicit DATs', () => { dat: [path.join('test', 'fixtures', 'dats', 'smdb*')], input: [path.join('test', 'fixtures', 'roms', 'patchable')], }); - const dat = (await new DATScanner(options, new ProgressBarFake()).scan())[0]; - const romFiles = await new ROMScanner(options, new ProgressBarFake()).scan(); + const dat = (await new DATScanner( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan())[0]; + const romFiles = await new ROMScanner( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan(); // And pre-assert all Game names and ROM names have path separators in them const totalRoms = 
dat.getGames().reduce((gameSum, game) => gameSum + game.getRoms().length, 0); diff --git a/test/modules/candidatePreferer.test.ts b/test/modules/candidatePreferer.test.ts index fb40d5e1b..6ca298f8c 100644 --- a/test/modules/candidatePreferer.test.ts +++ b/test/modules/candidatePreferer.test.ts @@ -5,7 +5,7 @@ import LogiqxDAT from '../../src/types/dats/logiqx/logiqxDat.js'; import Parent from '../../src/types/dats/parent.js'; import Release from '../../src/types/dats/release.js'; import ROM from '../../src/types/dats/rom.js'; -import Options, { OptionsProps } from '../../src/types/options.js'; +import Options, { OptionsProps, PreferRevision } from '../../src/types/options.js'; import ReleaseCandidate from '../../src/types/releaseCandidate.js'; import ROMWithFiles from '../../src/types/romWithFiles.js'; import ProgressBarFake from '../console/progressBarFake.js'; @@ -29,14 +29,14 @@ async function expectFilteredCandidates( expect(totalCandidates).toEqual(expectedSize); } -async function expectPreferredCandidates( +function expectPreferredCandidates( options: OptionsProps, parentsToCandidates: [Parent, ReleaseCandidate[]][], expectedNames: string[], -): Promise { +): void { const dat = new LogiqxDAT(new Header(), []); - const filteredParentsToCandidates = await buildCandidateFilter(options) + const filteredParentsToCandidates = buildCandidateFilter(options) .prefer(dat, new Map(parentsToCandidates)); // Assert CandidateFilter doesn't affect the number of parents expect(filteredParentsToCandidates.size).toEqual(parentsToCandidates.length); @@ -136,7 +136,7 @@ it('should return nothing if no parent has release candidates', async () => { describe('sort', () => { describe('prefer game regex', () => { it('should return the first candidate when option is empty', async () => { - await expectPreferredCandidates({ preferGameRegex: undefined, single: true }, [ + expectPreferredCandidates({ preferGameRegex: undefined, single: true }, [ await 
buildReleaseCandidatesWithRegionLanguage(['one'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'three'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['four', 'five', 'six'], [], 'EN'), @@ -144,7 +144,7 @@ describe('sort', () => { }); it('should return the first candidate when none matching', async () => { - await expectPreferredCandidates({ preferGameRegex: 'NINE', single: true }, [ + expectPreferredCandidates({ preferGameRegex: 'NINE', single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'three'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['four', 'five', 'six'], [], 'EN'), @@ -152,7 +152,7 @@ describe('sort', () => { }); it('should return the first matching candidate when some matching', async () => { - await expectPreferredCandidates({ preferGameRegex: '/THREE|five/i', single: true }, [ + expectPreferredCandidates({ preferGameRegex: '/THREE|five/i', single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'three'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['four', 'five', 'six'], [], 'EN'), @@ -160,7 +160,7 @@ describe('sort', () => { }); it('should return the first candidate when all matching', async () => { - await expectPreferredCandidates({ preferGameRegex: '[aeiou]', single: true }, [ + expectPreferredCandidates({ preferGameRegex: '[aeiou]', single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'three'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['four', 'five', 'six'], [], 'EN'), @@ -170,7 +170,7 @@ describe('sort', () => { describe('prefer rom regex', () => { it('should return the first candidate when option is empty', async () => { - await expectPreferredCandidates({ preferRomRegex: undefined, single: true }, [ + 
expectPreferredCandidates({ preferRomRegex: undefined, single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'three'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['four', 'five', 'six'], [], 'EN'), @@ -178,7 +178,7 @@ describe('sort', () => { }); it('should return the first candidate when none matching', async () => { - await expectPreferredCandidates({ preferRomRegex: '/five\\.nes/i', single: true }, [ + expectPreferredCandidates({ preferRomRegex: '/five\\.nes/i', single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'three'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['four', 'five', 'six'], [], 'EN'), @@ -186,7 +186,7 @@ describe('sort', () => { }); it('should return the first matching candidate when some matching', async () => { - await expectPreferredCandidates({ preferRomRegex: '/THREE|five\\.rom/i', single: true }, [ + expectPreferredCandidates({ preferRomRegex: '/THREE|five\\.rom/i', single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'three'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['four', 'five', 'six'], [], 'EN'), @@ -194,7 +194,7 @@ describe('sort', () => { }); it('should return the first candidate when all matching', async () => { - await expectPreferredCandidates({ preferRomRegex: '[aeiou]', single: true }, [ + expectPreferredCandidates({ preferRomRegex: '[aeiou]', single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'three'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['four', 'five', 'six'], [], 'EN'), @@ -204,7 +204,7 @@ describe('sort', () => { describe('prefer verified', () => { it('should return the first candidate when option is false', 
async () => { - await expectPreferredCandidates({ preferVerified: false, single: true }, [ + expectPreferredCandidates({ preferVerified: false, single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'two [!]'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['three [!]', 'three'], [], 'EN'), @@ -212,7 +212,7 @@ describe('sort', () => { }); it('should return the first candidate when none matching', async () => { - await expectPreferredCandidates({ preferVerified: true, single: true }, [ + expectPreferredCandidates({ preferVerified: true, single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'two two'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['three', 'three three'], [], 'EN'), @@ -220,7 +220,7 @@ describe('sort', () => { }); it('should return the first matching candidate when some matching', async () => { - await expectPreferredCandidates({ preferVerified: true, single: true }, [ + expectPreferredCandidates({ preferVerified: true, single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'two [!]'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['three [!]', 'three'], [], 'EN'), @@ -228,7 +228,7 @@ describe('sort', () => { }); it('should return the first candidate when all matching', async () => { - await expectPreferredCandidates({ preferVerified: true, single: true }, [ + expectPreferredCandidates({ preferVerified: true, single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one [!]'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two [!]', 'two two [!]'], [], 'EN'), await buildReleaseCandidatesWithRegionLanguage(['three [!]', 'three three [!]'], [], 'EN'), @@ -238,7 +238,7 @@ describe('sort', () => { describe('prefer good', () => { it('should 
return the first candidate when option is false', async () => { - await expectPreferredCandidates({ preferGood: false, single: true }, [ + expectPreferredCandidates({ preferGood: false, single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'two [b]'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['three [b]', 'three'], 'USA', 'EN'), @@ -246,7 +246,7 @@ describe('sort', () => { }); it('should return the first candidate when none matching', async () => { - await expectPreferredCandidates({ preferGood: true, single: true }, [ + expectPreferredCandidates({ preferGood: true, single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'two two'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['three', 'three three'], 'USA', 'EN'), @@ -254,7 +254,7 @@ describe('sort', () => { }); it('should return the first matching candidate when some matching', async () => { - await expectPreferredCandidates({ preferGood: true, single: true }, [ + expectPreferredCandidates({ preferGood: true, single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'two [b]'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['three [b]', 'three'], 'USA', 'EN'), @@ -262,7 +262,7 @@ describe('sort', () => { }); it('should return the first candidate when all matching', async () => { - await expectPreferredCandidates({ preferGood: true, single: true }, [ + expectPreferredCandidates({ preferGood: true, single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one [b]'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two [b]', 'two two [b]'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['three [b]', 'three three [b]'], 'USA', 'EN'), @@ -272,7 +272,7 @@ describe('sort', () 
=> { describe('prefer languages', () => { it('should return the first candidate when option is empty', async () => { - await expectPreferredCandidates({ preferLanguage: [], single: true }, [ + expectPreferredCandidates({ preferLanguage: [], single: true }, [ await buildReleaseCandidatesWithRegionLanguage('one', 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage('two', 'USA', ['ES', 'EN']), await buildReleaseCandidatesWithRegionLanguage('three', 'JPN', 'JA'), @@ -283,7 +283,7 @@ describe('sort', () => { }); it('should return the first candidate when none matching', async () => { - await expectPreferredCandidates({ preferLanguage: ['EN'], single: true }, [ + expectPreferredCandidates({ preferLanguage: ['EN'], single: true }, [ await buildReleaseCandidatesWithRegionLanguage('one', 'SPA', 'ES'), await buildReleaseCandidatesWithRegionLanguage('two', 'JPN', 'JA'), await buildReleaseCandidatesWithRegionLanguage('three', 'EUR', ['DE', 'IT']), @@ -292,7 +292,7 @@ describe('sort', () => { }); it('should return the first matching candidate when some matching', async () => { - await expectPreferredCandidates({ preferLanguage: ['EN', 'JA'], single: true }, [ + expectPreferredCandidates({ preferLanguage: ['EN', 'JA'], single: true }, [ await buildReleaseCandidatesWithRegionLanguage('one', 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage('two', 'USA', ['ES', 'EN']), await buildReleaseCandidatesWithRegionLanguage('three', 'JPN', 'JA'), @@ -302,7 +302,7 @@ describe('sort', () => { ], ['one (USA) (EN)', 'two (USA) (EN)', 'three (JPN) (JA)', 'four (JPN) (EN)', 'five (EUR) (DE)', 'six (EUR)']); }); - it('should treat "World" as English', async () => { + it('should treat "World" as English', () => { const gameParent = new Game({ name: 'Akumajou Special - Boku Dracula-kun (Japan)', release: new Release('Akumajou Special - Boku Dracula-kun (Japan)', 'JPN') }); const gameWorldJa = new Game({ name: 'Akumajou Special - Boku Dracula-kun (World) (Ja) (Castlevania 
Anniversary Collection)' }); const gameWorld = new Game({ name: 'Kid Dracula (World) (Castlevania Anniversary Collection)' }); @@ -310,7 +310,7 @@ describe('sort', () => { const parent = new Parent(gameParent, games); const releaseCandidates = games .map((game) => new ReleaseCandidate(game, game.getReleases()[0], [])); - await expectPreferredCandidates({ single: true, preferLanguage: ['EN'] }, [[parent, releaseCandidates]], [gameWorld.getName()]); + expectPreferredCandidates({ single: true, preferLanguage: ['EN'] }, [[parent, releaseCandidates]], [gameWorld.getName()]); }); test.each([ @@ -338,17 +338,17 @@ describe('sort', () => { ['SV', 'DE'], 'Tintin in Tibet (Europe) (En,Es,Sv)', ], - ])('should rank candidates by all preferred languages: %s', async (gameNames, preferLanguage, expectedName) => { + ])('should rank candidates by all preferred languages: %s', (gameNames, preferLanguage, expectedName) => { const games = gameNames.map((gameName) => new Game({ name: gameName })); const parent = new Parent(games[0], games); const releaseCandidates = games.map((game) => new ReleaseCandidate(game, undefined, [])); - await expectPreferredCandidates({ preferLanguage, single: true }, [ + expectPreferredCandidates({ preferLanguage, single: true }, [ [parent, releaseCandidates], ], [expectedName]); }); it('should return the first candidate when all matching', async () => { - await expectPreferredCandidates({ preferLanguage: ['EN', 'JA'], single: true }, [ + expectPreferredCandidates({ preferLanguage: ['EN', 'JA'], single: true }, [ await buildReleaseCandidatesWithRegionLanguage('one', 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage('two', 'USA', ['ES', 'EN']), await buildReleaseCandidatesWithRegionLanguage('three', 'JPN', 'JA'), @@ -360,7 +360,7 @@ describe('sort', () => { describe('prefer regions', () => { it('should return the first candidate when option is false', async () => { - await expectPreferredCandidates({ preferRegion: [], single: true }, [ + 
expectPreferredCandidates({ preferRegion: [], single: true }, [ await buildReleaseCandidatesWithRegionLanguage('one', 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage('two', 'USA', ['ES', 'EN']), await buildReleaseCandidatesWithRegionLanguage('three', ['EUR', 'USA'], 'EN'), @@ -372,7 +372,7 @@ describe('sort', () => { }); it('should return the first candidate when none matching', async () => { - await expectPreferredCandidates({ preferRegion: ['USA', 'EUR'], single: true }, [ + expectPreferredCandidates({ preferRegion: ['USA', 'EUR'], single: true }, [ await buildReleaseCandidatesWithRegionLanguage('one', 'EUR', ['DE', 'IT']), await buildReleaseCandidatesWithRegionLanguage('two', ['TAI', 'CHN'], 'ZH'), await buildReleaseCandidatesWithRegionLanguage('three (Japan)', undefined, undefined), @@ -381,7 +381,7 @@ describe('sort', () => { }); it('should return the first matching candidate when some matching', async () => { - await expectPreferredCandidates({ preferRegion: ['USA', 'EUR'], single: true }, [ + expectPreferredCandidates({ preferRegion: ['USA', 'EUR'], single: true }, [ await buildReleaseCandidatesWithRegionLanguage('one', 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage('two', 'USA', ['ES', 'EN']), await buildReleaseCandidatesWithRegionLanguage('three', ['EUR', 'USA'], 'EN'), @@ -393,7 +393,7 @@ describe('sort', () => { }); it('should return the first candidate when all matching', async () => { - await expectPreferredCandidates({ preferRegion: ['USA', 'EUR'], single: true }, [ + expectPreferredCandidates({ preferRegion: ['USA', 'EUR'], single: true }, [ await buildReleaseCandidatesWithRegionLanguage('one', 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage('two', 'USA', ['ES', 'EN']), await buildReleaseCandidatesWithRegionLanguage('three', ['EUR', 'USA'], 'EN'), @@ -412,7 +412,7 @@ describe('sort', () => { 'Fighting Vipers 2 v1.001 (2000)(Sega)(PAL)(M6)[!]', ], 'Fighting Vipers 2 v1.001 (2000)(Sega)(PAL)(M6)[!]'], ])('should 
return the first candidate when all matching by name: %s', async (names, expectedName) => { - await expectPreferredCandidates( + expectPreferredCandidates( { preferRegion: ['USA', 'EUR', 'JPN'], single: true }, [await buildReleaseCandidatesWithRegionLanguage(names)], [expectedName], @@ -421,28 +421,12 @@ describe('sort', () => { }); describe('prefer revision newer', () => { - test.each([ - [['one'], 'one'], - [['two', 'two (Rev 1)'], 'two'], - [['three', 'three (Rev 1)', 'three (Rev2)'], 'three'], - [['four (Rev 1.1)', 'four (Rev 1.2)'], 'four (Rev 1.1)'], - [['five (Rev 13.37)'], 'five (Rev 13.37)'], - [['six (Rev B)', 'six (Rev A)', 'six (Rev C)'], 'six (Rev B)'], - [['seven (RE2)', 'seven (RE3)', 'seven'], 'seven (RE2)'], - ])('should return the first candidate when option is false: %s', async (names, expectedName) => { - await expectPreferredCandidates( - { preferRevisionNewer: false, single: true }, - [await buildReleaseCandidatesWithRegionLanguage(names)], - [expectedName], - ); - }); - test.each([ [['one'], 'one'], [['two', 'two two'], 'two'], ])('should return the first candidate when none matching: %s', async (names, expectedName) => { - await expectPreferredCandidates( - { preferRevisionNewer: true, single: true }, + expectPreferredCandidates( + { preferRevision: PreferRevision[PreferRevision.NEWER].toLowerCase(), single: true }, [await buildReleaseCandidatesWithRegionLanguage(names)], [expectedName], ); @@ -457,8 +441,8 @@ describe('sort', () => { [['six (Rev B)', 'six (Rev A)', 'six (Rev C)'], 'six (Rev C)'], [['seven (RE2)', 'seven (RE3)', 'seven'], 'seven (RE3)'], ])('should return the first matching candidate when some matching: %s', async (names, expectedName) => { - await expectPreferredCandidates( - { preferRevisionNewer: true, single: true }, + expectPreferredCandidates( + { preferRevision: PreferRevision[PreferRevision.NEWER].toLowerCase(), single: true }, [await buildReleaseCandidatesWithRegionLanguage(names)], [expectedName], ); @@ -473,8 
+457,8 @@ describe('sort', () => { 'ChuChu Rocket! v1.014 (2000)(Sega)(PAL)(M5)[!]', ], 'ChuChu Rocket! v1.014 (2000)(Sega)(PAL)(M5)[!]'], ])('should return the first candidate when all matching: %s', async (names, expectedName) => { - await expectPreferredCandidates( - { preferRevisionNewer: true, single: true }, + expectPreferredCandidates( + { preferRevision: PreferRevision[PreferRevision.NEWER].toLowerCase(), single: true }, [await buildReleaseCandidatesWithRegionLanguage(names)], [expectedName], ); @@ -482,28 +466,12 @@ describe('sort', () => { }); describe('prefer revision older', () => { - test.each([ - [['one'], 'one'], - [['two', 'two (Rev 1)'], 'two'], - [['three', 'three (Rev 1)', 'three (Rev2)'], 'three'], - [['four (Rev 1.1)', 'four (Rev 1.2)'], 'four (Rev 1.1)'], - [['five (Rev 13.37)'], 'five (Rev 13.37)'], - [['six (Rev B)', 'six (Rev A)', 'six (Rev C)'], 'six (Rev B)'], - [['seven (RE2)', 'seven (RE3)', 'seven'], 'seven (RE2)'], - ])('should return the first candidate when option is false: %s', async (names, expectedName) => { - await expectPreferredCandidates( - { preferRevisionOlder: false, single: true }, - [await buildReleaseCandidatesWithRegionLanguage(names)], - [expectedName], - ); - }); - test.each([ [['one'], 'one'], [['two', 'two two'], 'two'], ])('should return the first candidate when none matching: %s', async (names, expectedName) => { - await expectPreferredCandidates( - { preferRevisionOlder: true, single: true }, + expectPreferredCandidates( + { preferRevision: PreferRevision[PreferRevision.OLDER].toLowerCase(), single: true }, [await buildReleaseCandidatesWithRegionLanguage(names)], [expectedName], ); @@ -518,8 +486,8 @@ describe('sort', () => { [['six (Rev B)', 'six (Rev A)', 'six (Rev C)'], 'six (Rev A)'], [['seven (RE2)', 'seven (RE3)', 'seven'], 'seven'], ])('should return the first matching candidate when some matching: %s', async (names, expectedName) => { - await expectPreferredCandidates( - { preferRevisionOlder: true, 
single: true }, + expectPreferredCandidates( + { preferRevision: PreferRevision[PreferRevision.OLDER].toLowerCase(), single: true }, [await buildReleaseCandidatesWithRegionLanguage(names)], [expectedName], ); @@ -529,8 +497,8 @@ describe('sort', () => { [['one (Rev 1.2)', 'one (Rev 1.1)'], 'one (Rev 1.1)'], [['two (Rev 13.37)'], 'two (Rev 13.37)'], ])('should return the first candidate when all matching: %s', async (names, expectedName) => { - await expectPreferredCandidates( - { preferRevisionOlder: true, single: true }, + expectPreferredCandidates( + { preferRevision: PreferRevision[PreferRevision.OLDER].toLowerCase(), single: true }, [await buildReleaseCandidatesWithRegionLanguage(names)], [expectedName], ); @@ -539,7 +507,7 @@ describe('sort', () => { describe('prefer retail', () => { it('should return the first candidate when option is false', async () => { - await expectPreferredCandidates({ preferRetail: false, single: true }, [ + expectPreferredCandidates({ preferRetail: false, single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two (Aftermarket)', 'two'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['three [b]', 'three'], 'USA', 'EN'), @@ -554,7 +522,7 @@ describe('sort', () => { }); it('should return the first candidate when none matching', async () => { - await expectPreferredCandidates({ preferRetail: true, single: true }, [ + expectPreferredCandidates({ preferRetail: true, single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'two two'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['three', 'three three', 'three three three'], 'USA', 'EN'), @@ -562,7 +530,7 @@ describe('sort', () => { }); it('should return the first matching candidate when some matching', async () => { - await expectPreferredCandidates({ preferRetail: true, single: true }, [ + 
expectPreferredCandidates({ preferRetail: true, single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['one'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two (Aftermarket)', 'two'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['three [b]', 'three'], 'USA', 'EN'), @@ -577,7 +545,7 @@ describe('sort', () => { }); it('should return the first candidate when all matching', async () => { - await expectPreferredCandidates({ preferRetail: true, single: true }, [ + expectPreferredCandidates({ preferRetail: true, single: true }, [ await buildReleaseCandidatesWithRegionLanguage(['two (Aftermarket)', 'two'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['three [b]', 'three'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['four (Beta)', 'four (Proto)', 'four'], 'USA', 'EN'), @@ -591,76 +559,9 @@ describe('sort', () => { }); }); - describe('prefer NTSC', () => { - it('should return the first candidate when option is false', async () => { - await expectPreferredCandidates({ preferNTSC: false, single: true }, [ - await buildReleaseCandidatesWithRegionLanguage(['one']), - await buildReleaseCandidatesWithRegionLanguage(['two', 'two (NTSC)']), - await buildReleaseCandidatesWithRegionLanguage(['three (NTSC)', 'three']), - ], ['one', 'two', 'three (NTSC)']); - }); - - it('should return the first candidate when none matching', async () => { - await expectPreferredCandidates({ preferNTSC: true, single: true }, [ - await buildReleaseCandidatesWithRegionLanguage(['one']), - await buildReleaseCandidatesWithRegionLanguage(['four', 'four (Demo)']), - ], ['one', 'four']); - }); - - it('should return the first matching candidate when some matching', async () => { - await expectPreferredCandidates({ preferNTSC: true, single: true }, [ - await buildReleaseCandidatesWithRegionLanguage(['one']), - await buildReleaseCandidatesWithRegionLanguage(['two', 'two (NTSC)']), - await 
buildReleaseCandidatesWithRegionLanguage(['three (NTSC)', 'three']), - ], ['one', 'two (NTSC)', 'three (NTSC)']); - }); - - it('should return the first candidate when all matching', async () => { - await expectPreferredCandidates({ preferNTSC: true, single: true }, [ - await buildReleaseCandidatesWithRegionLanguage(['two', 'two (NTSC)']), - await buildReleaseCandidatesWithRegionLanguage(['three (NTSC)', 'three']), - ], ['two (NTSC)', 'three (NTSC)']); - }); - }); - - describe('prefer PAL', () => { - it('should return the first candidate when option is false', async () => { - await expectPreferredCandidates({ preferPAL: false, single: true }, [ - await buildReleaseCandidatesWithRegionLanguage(['one']), - await buildReleaseCandidatesWithRegionLanguage(['two', 'two (PAL)']), - await buildReleaseCandidatesWithRegionLanguage(['three', 'three (PAL 60Hz)']), - await buildReleaseCandidatesWithRegionLanguage(['four (PAL)', 'four']), - ], ['one', 'two', 'three', 'four (PAL)']); - }); - - it('should return the first candidate when none matching', async () => { - await expectPreferredCandidates({ preferPAL: true, single: true }, [ - await buildReleaseCandidatesWithRegionLanguage(['one']), - await buildReleaseCandidatesWithRegionLanguage(['five', 'five (Demo)']), - ], ['one', 'five']); - }); - - it('should return the first matching candidate when some matching', async () => { - await expectPreferredCandidates({ preferPAL: true, single: true }, [ - await buildReleaseCandidatesWithRegionLanguage(['one']), - await buildReleaseCandidatesWithRegionLanguage(['two', 'two (PAL)']), - await buildReleaseCandidatesWithRegionLanguage(['three', 'three (PAL 60Hz)']), - await buildReleaseCandidatesWithRegionLanguage(['four (PAL)', 'four']), - ], ['one', 'two (PAL)', 'three (PAL 60Hz)', 'four (PAL)']); - }); - - it('should return the first candidate when all matching', async () => { - await expectPreferredCandidates({ preferPAL: true, single: true }, [ - await 
buildReleaseCandidatesWithRegionLanguage(['two', 'two (PAL)']), - await buildReleaseCandidatesWithRegionLanguage(['three', 'three (PAL 60Hz)']), - await buildReleaseCandidatesWithRegionLanguage(['four (PAL)', 'four']), - ], ['two (PAL)', 'three (PAL 60Hz)', 'four (PAL)']); - }); - }); - describe('prefer parent', () => { it('should return the first candidate when option is false', async () => { - await expectPreferredCandidates({ preferParent: false, single: true }, [ + expectPreferredCandidates({ preferParent: false, single: true }, [ await buildReleaseCandidatesWithRegionLanguage('one', 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'two two'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage('three', 'USA', 'EN', { cloneOf: 'zero' }), @@ -671,14 +572,14 @@ describe('sort', () => { }); it('should return the first candidate when none matching', async () => { - await expectPreferredCandidates({ preferParent: true, single: true }, [ + expectPreferredCandidates({ preferParent: true, single: true }, [ await buildReleaseCandidatesWithRegionLanguage('one', 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'two two'], 'USA', 'EN'), ], ['one (USA) (EN)', 'two (USA) (EN)']); }); it('should return the first matching candidate when some matching', async () => { - await expectPreferredCandidates({ preferParent: true, single: true }, [ + expectPreferredCandidates({ preferParent: true, single: true }, [ await buildReleaseCandidatesWithRegionLanguage('one', 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage(['two', 'two two'], 'USA', 'EN'), await buildReleaseCandidatesWithRegionLanguage('three', 'USA', 'EN', { cloneOf: 'zero' }), @@ -689,7 +590,7 @@ describe('sort', () => { }); it('should return the first candidate when all matching', async () => { - await expectPreferredCandidates({ preferParent: true, single: true }, [ + expectPreferredCandidates({ preferParent: true, single: true }, [ await 
buildReleaseCandidatesWithRegionLanguage('one', 'USA', 'EN', { cloneOf: 'zero' }), await buildReleaseCandidatesWithRegionLanguage(['two (Parent)', 'two (Clone)'], 'USA', 'EN', [{}, { cloneOf: 'zero' }]), await buildReleaseCandidatesWithRegionLanguage(['three (Clone)', 'three (Parent)'], 'USA', 'EN', [{ cloneOf: 'zero' }, {}]), diff --git a/test/modules/candidateValidator.test.ts b/test/modules/candidateValidator.test.ts index ae5c550e4..5338fb108 100644 --- a/test/modules/candidateValidator.test.ts +++ b/test/modules/candidateValidator.test.ts @@ -32,7 +32,7 @@ it('should do nothing with no candidates', async () => { const dat = new LogiqxDAT(new Header(), []); const parentsToCandidates = await datToCandidates(dat); - const invalidCandidates = await new CandidateValidator(new ProgressBarFake()) + const invalidCandidates = new CandidateValidator(new ProgressBarFake()) .validate(dat, parentsToCandidates); expect(invalidCandidates).toHaveLength(0); @@ -59,7 +59,7 @@ it('should return nothing if all candidates have unique paths', async () => { ]); const parentsToCandidates = await datToCandidates(dat); - const invalidCandidates = await new CandidateValidator(new ProgressBarFake()) + const invalidCandidates = new CandidateValidator(new ProgressBarFake()) .validate(dat, parentsToCandidates); expect(invalidCandidates).toHaveLength(0); @@ -100,7 +100,7 @@ it('should return something if some candidates have conflicting paths', async () ]); const parentsToCandidates = await datToCandidates(dat); - const invalidCandidates = await new CandidateValidator(new ProgressBarFake()) + const invalidCandidates = new CandidateValidator(new ProgressBarFake()) .validate(dat, parentsToCandidates); const invalidCandidateNames = invalidCandidates diff --git a/test/modules/candidateWriter.test.ts b/test/modules/candidateWriter.test.ts index d0af61ac2..385fd08c6 100644 --- a/test/modules/candidateWriter.test.ts +++ b/test/modules/candidateWriter.test.ts @@ -1,9 +1,12 @@ +import 
'jest-extended'; + import fs, { Stats } from 'node:fs'; import os from 'node:os'; import path from 'node:path'; import Temp from '../../src/globals/temp.js'; import CandidateCombiner from '../../src/modules/candidateCombiner.js'; +import CandidateExtensionCorrector from '../../src/modules/candidateExtensionCorrector.js'; import CandidateGenerator from '../../src/modules/candidateGenerator.js'; import CandidatePatchGenerator from '../../src/modules/candidatePatchGenerator.js'; import CandidateWriter from '../../src/modules/candidateWriter.js'; @@ -19,6 +22,7 @@ import LogiqxDAT from '../../src/types/dats/logiqx/logiqxDat.js'; import Archive from '../../src/types/files/archives/archive.js'; import ArchiveEntry from '../../src/types/files/archives/archiveEntry.js'; import File from '../../src/types/files/file.js'; +import FileCache from '../../src/types/files/fileCache.js'; import { ChecksumBitmask } from '../../src/types/files/fileChecksums.js'; import FileFactory from '../../src/types/files/fileFactory.js'; import Options, { GameSubdirMode, OptionsProps } from '../../src/types/options.js'; @@ -59,7 +63,7 @@ async function walkAndStat(dirPath: string): Promise<[string, Stats][]> { // Hard-code properties that can change with file reads stats.atime = new Date(0); stats.atimeMs = 0; - // Hard-code properties that can change with hardlinking + // Hard-code properties that can change with hard-linking stats.ctimeMs = 0; stats.nlink = 0; } catch { @@ -73,9 +77,9 @@ async function walkAndStat(dirPath: string): Promise<[string, Stats][]> { ); } -function datInferrer(options: Options, romFiles: File[]): DAT { +async function datInferrer(options: Options, romFiles: File[]): Promise { // Run DATGameInferrer, but condense all DATs down to one - const datGames = new DATGameInferrer(options, new ProgressBarFake()).infer(romFiles) + const datGames = (await new DATGameInferrer(options, new ProgressBarFake()).infer(romFiles)) .flatMap((dat) => dat.getGames()); // TODO(cemmer): 
filter to unique games / remove duplicates return new LogiqxDAT(new Header({ name: 'ROMWriter Test' }), datGames); @@ -92,28 +96,44 @@ async function candidateWriter( const options = new Options({ ...optionsProps, input: [path.join(inputTemp, 'roms', inputGlob)], + inputExclude: [path.join(inputTemp, 'roms', '**', '*.nkit.*')], ...(patchGlob ? { patch: [path.join(inputTemp, patchGlob)] } : {}), output: outputTemp, }); let romFiles: File[] = []; try { - romFiles = await new ROMScanner(options, new ProgressBarFake()).scan(); + romFiles = await new ROMScanner( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan(); } catch { /* ignored */ } - const dat = datInferrer(options, romFiles); - const romFilesWithHeaders = await new ROMHeaderProcessor(options, new ProgressBarFake()) - .process(romFiles); - const indexedRomFiles = await new ROMIndexer(options, new ProgressBarFake()) - .index(romFilesWithHeaders); + const dat = await datInferrer(options, romFiles); + const romFilesWithHeaders = await new ROMHeaderProcessor( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).process(romFiles); + const indexedRomFiles = new ROMIndexer(options, new ProgressBarFake()).index(romFilesWithHeaders); let candidates = await new CandidateGenerator(options, new ProgressBarFake()) .generate(dat, indexedRomFiles); if (patchGlob) { - const patches = await new PatchScanner(options, new ProgressBarFake()).scan(); + const patches = await new PatchScanner( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan(); candidates = await new CandidatePatchGenerator(new ProgressBarFake()) .generate(dat, candidates, patches); } - candidates = await new CandidateCombiner(options, new ProgressBarFake()) + candidates = await new CandidateExtensionCorrector( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).correct(dat, candidates); + candidates = new CandidateCombiner(options, new ProgressBarFake()) 
.combine(dat, candidates); // When @@ -252,8 +272,8 @@ describe('zip', () => { it('should not write anything if the output is expected and overwriting invalid', async () => { await copyFixturesToTemp(async (inputTemp, outputTemp) => { - // Note: need to de-conflict headered & headerless ROMs due to duplicate output paths - const inputGlob = '**/!(headerless)/*'; + // Note: need to exclude some ROMs to prevent duplicate output paths + const inputGlob = '**/!(chd|headerless)/*'; // Given const options = new Options({ commands: ['copy', 'zip'] }); @@ -357,7 +377,8 @@ describe('zip', () => { outputTemp, ); expect(outputFiles).toHaveLength(1); - const archiveEntries = await FileFactory.filesFrom(path.join(outputTemp, outputFiles[0][0])); + const archiveEntries = await new FileFactory(new FileCache()) + .filesFrom(path.join(outputTemp, outputFiles[0][0])); expect(archiveEntries).toHaveLength(1); const archiveEntry = archiveEntries[0] as ArchiveEntry; expect(archiveEntry.getEntryPath()).toEqual(expectedFileName); @@ -392,7 +413,8 @@ describe('zip', () => { outputTemp, ); expect(outputFiles).toHaveLength(1); - const archiveEntries = await FileFactory.filesFrom(path.join(outputTemp, outputFiles[0][0])); + const archiveEntries = await new FileFactory(new FileCache()) + .filesFrom(path.join(outputTemp, outputFiles[0][0])); expect(archiveEntries).toHaveLength(1); const archiveEntry = archiveEntries[0] as ArchiveEntry; expect(archiveEntry.getEntryPath()).toEqual(expectedFileName); @@ -423,7 +445,8 @@ describe('zip', () => { const outputFiles = (await candidateWriter(options, inputTemp, inputGlob, 'patches', outputTemp)); const writtenRomsAndCrcs = (await Promise.all(outputFiles - .map(async ([outputPath]) => FileFactory.filesFrom(path.join(outputTemp, outputPath))))) + .map(async ([outputPath]) => new FileFactory(new FileCache()) + .filesFrom(path.join(outputTemp, outputPath))))) .flat() .map((entry) => [entry.toString().replace(outputTemp + path.sep, ''), entry.getCrc32() ?? 
'']) .sort((a, b) => a[0].localeCompare(b[0])); @@ -434,7 +457,7 @@ describe('zip', () => { test.each([ [ '**/!(header*)/*', - ['0F09A40.zip', '3708F2C.zip', '612644F.zip', '65D1206.zip', '92C85C9.zip', 'C01173E.zip', 'KDULVQN.zip', 'before.zip', 'best.zip', 'empty.zip', 'five.zip', 'fizzbuzz.zip', 'foobar.zip', 'four.zip', 'fourfive.zip', 'loremipsum.zip', 'one.zip', 'onetwothree.zip', 'three.zip', 'two.zip', 'unknown.zip'], + ['0F09A40.zip', '2048.zip', '3708F2C.zip', '4096.zip', '612644F.zip', '65D1206.zip', '92C85C9.zip', 'C01173E.zip', 'CD-ROM.zip', 'GD-ROM.zip', 'KDULVQN.zip', 'UMD.zip', 'before.zip', 'best.zip', 'empty.zip', 'five.zip', 'fizzbuzz.zip', 'foobar.zip', 'four.zip', 'fourfive.zip', 'loremipsum.zip', 'one.zip', 'onetwothree.zip', 'three.zip', 'two.zip', 'unknown.zip'], ], [ '7z/*', @@ -484,33 +507,33 @@ describe('zip', () => { test.each([ [ '**/!(header*)/*', - ['0F09A40.zip', '3708F2C.zip', '612644F.zip', '65D1206.zip', '92C85C9.zip', 'C01173E.zip', 'KDULVQN.zip', 'before.zip', 'best.zip', 'empty.zip', 'five.zip', 'fizzbuzz.zip', 'foobar.zip', 'four.zip', 'fourfive.zip', 'loremipsum.zip', 'one.zip', 'onetwothree.zip', 'three.zip', 'two.zip', 'unknown.zip'], - ['patchable/0F09A40.rom', 'patchable/3708F2C.rom', 'patchable/612644F.rom', 'patchable/65D1206.rom', 'patchable/92C85C9.rom', 'patchable/C01173E.rom', 'patchable/KDULVQN.rom', 'patchable/before.rom', 'patchable/best.gz', 'raw/empty.rom', 'raw/fizzbuzz.nes', 'raw/foobar.lnx', 'raw/loremipsum.rom', 'raw/one.rom', 'raw/three.rom', 'raw/two.rom', 'raw/unknown.rom'], + ['0F09A40.zip', '2048.zip', '3708F2C.zip', '4096.zip', '612644F.zip', '65D1206.zip', '92C85C9.zip', 'C01173E.zip', 'CD-ROM.zip', 'GD-ROM.zip', 'KDULVQN.zip', 'UMD.zip', 'before.zip', 'best.zip', 'empty.zip', 'five.zip', 'fizzbuzz.zip', 'foobar.zip', 'four.zip', 'fourfive.zip', 'loremipsum.zip', 'one.zip', 'onetwothree.zip', 'three.zip', 'two.zip', 'unknown.zip'], + ['zip/fourfive.zip'], ], [ '7z/*', ['fizzbuzz.zip', 'foobar.zip', 
'loremipsum.zip', 'onetwothree.zip', 'unknown.zip'], - ['7z/fizzbuzz.7z', '7z/foobar.7z', '7z/loremipsum.7z', '7z/onetwothree.7z', '7z/unknown.7z'], + [], ], [ 'rar/*', ['fizzbuzz.zip', 'foobar.zip', 'loremipsum.zip', 'onetwothree.zip', 'unknown.zip'], - ['rar/fizzbuzz.rar', 'rar/foobar.rar', 'rar/loremipsum.rar', 'rar/onetwothree.rar', 'rar/unknown.rar'], + [], ], [ 'raw/*', ['empty.zip', 'five.zip', 'fizzbuzz.zip', 'foobar.zip', 'four.zip', 'loremipsum.zip', 'one.zip', 'three.zip', 'two.zip', 'unknown.zip'], - ['raw/empty.rom', 'raw/fizzbuzz.nes', 'raw/foobar.lnx', 'raw/loremipsum.rom', 'raw/one.rom', 'raw/three.rom', 'raw/two.rom', 'raw/unknown.rom'], + [], ], [ 'tar/*', ['fizzbuzz.zip', 'foobar.zip', 'loremipsum.zip', 'onetwothree.zip', 'unknown.zip'], - ['tar/fizzbuzz.tar.gz', 'tar/foobar.tar.gz', 'tar/loremipsum.tar.gz', 'tar/onetwothree.tar.gz', 'tar/unknown.tar.gz'], + [], ], [ 'zip/*', ['fizzbuzz.zip', 'foobar.zip', 'fourfive.zip', 'loremipsum.zip', 'onetwothree.zip', 'unknown.zip'], - ['zip/fizzbuzz.zip', 'zip/foobar.zip', 'zip/loremipsum.zip', 'zip/onetwothree.zip', 'zip/unknown.zip'], + ['zip/fizzbuzz.zip', 'zip/foobar.zip', 'zip/fourfive.zip', 'zip/loremipsum.zip', 'zip/unknown.zip'], ], ])('should move, zip, and test: %s', async (inputGlob, expectedOutputPaths, expectedDeletedInputPaths) => { await copyFixturesToTemp(async (inputTemp, outputTemp) => { @@ -541,17 +564,15 @@ describe('zip', () => { // File wasn't deleted, ensure it wasn't touched expect(statsAfter).toEqual(statsBefore); }); - romFilesBefore + expect(romFilesBefore .filter(([inputFile]) => !romFilesAfter.has(inputFile)) - .forEach(([inputFile]) => { - // File was deleted, ensure it was expected - expect(expectedDeletedInputPaths).toContain(inputFile.replace(/[\\/]/g, '/')); - }); + .map(([inputFile]) => inputFile.replace(/[\\/]/g, '/'))) + .toIncludeSameMembers(expectedDeletedInputPaths); }); }); test.each([ - ['**/*', [ + ['**/!(chd)/*', [ ['ROMWriter Test.zip|0F09A40.rom', '2f943e86'], 
['ROMWriter Test.zip|3708F2C.rom', '20891c9f'], ['ROMWriter Test.zip|612644F.rom', 'f7591b29'], @@ -561,6 +582,10 @@ describe('zip', () => { ['ROMWriter Test.zip|before.rom', '0361b321'], ['ROMWriter Test.zip|best.rom', '1e3d78cf'], ['ROMWriter Test.zip|C01173E.rom', 'dfaebe28'], + [`ROMWriter Test.zip|${path.join('CD-ROM', 'CD-ROM (Track 1).bin')}`, '49ca35fb'], + [`ROMWriter Test.zip|${path.join('CD-ROM', 'CD-ROM (Track 2).bin')}`, '0316f720'], + [`ROMWriter Test.zip|${path.join('CD-ROM', 'CD-ROM (Track 3).bin')}`, 'a320af40'], + [`ROMWriter Test.zip|${path.join('CD-ROM', 'CD-ROM.cue')}`, '4ce39e73'], ['ROMWriter Test.zip|color_test.nintendoentertainmentsystem', 'c9c1b7aa'], ['ROMWriter Test.zip|diagnostic_test_cartridge.a78', 'f6cc9b1c'], ['ROMWriter Test.zip|empty.rom', '00000000'], @@ -571,6 +596,11 @@ describe('zip', () => { ['ROMWriter Test.zip|four.rom', '1cf3ca74'], [`ROMWriter Test.zip|${path.join('fourfive', 'five.rom')}`, '3e5daf67'], [`ROMWriter Test.zip|${path.join('fourfive', 'four.rom')}`, '1cf3ca74'], + [`ROMWriter Test.zip|${path.join('GD-ROM', 'GD-ROM.gdi')}`, 'f16f621c'], + [`ROMWriter Test.zip|${path.join('GD-ROM', 'track01.bin')}`, '9796ed9a'], + [`ROMWriter Test.zip|${path.join('GD-ROM', 'track02.raw')}`, 'abc178d5'], + [`ROMWriter Test.zip|${path.join('GD-ROM', 'track03.bin')}`, '61a363f1'], + [`ROMWriter Test.zip|${path.join('GD-ROM', 'track04.bin')}`, 'fc5ff5a0'], ['ROMWriter Test.zip|KDULVQN.rom', 'b1c303e4'], ['ROMWriter Test.zip|LCDTestROM.lnx', '2d251538'], ['ROMWriter Test.zip|loremipsum.rom', '70856527'], @@ -582,6 +612,7 @@ describe('zip', () => { ['ROMWriter Test.zip|speed_test_v51.smc', '9adca6cc'], ['ROMWriter Test.zip|three.rom', 'ff46c5d8'], ['ROMWriter Test.zip|two.rom', '96170874'], + ['ROMWriter Test.zip|UMD.iso', 'e90f7cf5'], ['ROMWriter Test.zip|unknown.rom', '377a7727'], ]], ['raw/*', [ @@ -614,7 +645,7 @@ describe('zip', () => { // Then expect(outputFiles).toHaveLength(1); const outputFile = path.join(outputTemp, 
outputFiles[0][0]); - const writtenRomsAndCrcs = (await FileFactory.filesFrom(outputFile)) + const writtenRomsAndCrcs = (await new FileFactory(new FileCache()).filesFrom(outputFile)) .map((entry) => [entry.toString().replace(outputTemp + path.sep, ''), entry.getCrc32() ?? '']) .sort((a, b) => a[0].localeCompare(b[0])); expect(writtenRomsAndCrcs).toEqual(expectedFilesAndCrcs); @@ -700,13 +731,16 @@ describe('extract', () => { it('should not write anything if the output is expected and overwriting invalid', async () => { await copyFixturesToTemp(async (inputTemp, outputTemp) => { + // Note: need to exclude some ROMs to prevent duplicate output paths + const inputGlob = '**/!(chd)/*'; + // Given const options = new Options({ commands: ['copy', 'extract'] }); const inputFilesBefore = await walkAndStat(inputTemp); await expect(walkAndStat(outputTemp)).resolves.toHaveLength(0); // And we've written once - await candidateWriter(options, inputTemp, '**/*', undefined, outputTemp); + await candidateWriter(options, inputTemp, inputGlob, undefined, outputTemp); // And files were written const outputFilesBefore = await walkAndStat(outputTemp); @@ -717,7 +751,7 @@ describe('extract', () => { await candidateWriter({ ...options, overwriteInvalid: true, - }, inputTemp, '**/*', undefined, outputTemp); + }, inputTemp, inputGlob, undefined, outputTemp); // Then the output wasn't touched await expect(walkAndStat(outputTemp)).resolves.toEqual(outputFilesBefore); @@ -730,7 +764,7 @@ describe('extract', () => { it('should write if the output is not expected and overwriting invalid', async () => { await copyFixturesToTemp(async (inputTemp, outputTemp) => { // Given - const options = new Options({ commands: ['copy', 'extract'] }); + const options = new Options({ commands: ['copy', 'extract'], writerThreads: 1 }); const inputFilesBefore = await walkAndStat(inputTemp); await expect(walkAndStat(outputTemp)).resolves.toHaveLength(0); @@ -858,7 +892,8 @@ describe('extract', () => { const 
outputFiles = (await candidateWriter(options, inputTemp, inputGlob, 'patches', outputTemp)); const writtenRomsAndCrcs = (await Promise.all(outputFiles - .map(async ([outputPath]) => FileFactory.filesFrom(path.join(outputTemp, outputPath))))) + .map(async ([outputPath]) => new FileFactory(new FileCache()) + .filesFrom(path.join(outputTemp, outputPath))))) .flat() .map((entry) => [entry.toString().replace(outputTemp + path.sep, ''), entry.getCrc32() ?? '']) .sort((a, b) => a[0].localeCompare(b[0])); @@ -869,7 +904,11 @@ describe('extract', () => { test.each([ [ '**/!(header*)/*', - ['0F09A40.rom', '3708F2C.rom', '612644F.rom', '65D1206.rom', '92C85C9.rom', 'C01173E.rom', 'KDULVQN.rom', 'before.rom', 'best.rom', 'empty.rom', 'five.rom', 'fizzbuzz.nes', 'foobar.lnx', 'four.rom', path.join('fourfive', 'five.rom'), path.join('fourfive', 'four.rom'), 'loremipsum.rom', 'one.rom', path.join('onetwothree', 'one.rom'), path.join('onetwothree', 'three.rom'), path.join('onetwothree', 'two.rom'), 'three.rom', 'two.rom', 'unknown.rom'], + ['0F09A40.rom', '2048.rom', '3708F2C.rom', '4096.rom', '612644F.rom', '65D1206.rom', '92C85C9.rom', 'C01173E.rom', + path.join('CD-ROM', 'CD-ROM (Track 1).bin'), path.join('CD-ROM', 'CD-ROM (Track 2).bin'), path.join('CD-ROM', 'CD-ROM (Track 3).bin'), path.join('CD-ROM', 'CD-ROM.cue'), + path.join('GD-ROM', 'GD-ROM.gdi'), path.join('GD-ROM', 'track01.bin'), path.join('GD-ROM', 'track02.raw'), path.join('GD-ROM', 'track03.bin'), path.join('GD-ROM', 'track04.bin'), + 'KDULVQN.rom', 'UMD.iso', 'before.rom', 'best.rom', 'empty.rom', 'five.rom', 'fizzbuzz.nes', 'foobar.lnx', 'four.rom', path.join('fourfive', 'five.rom'), path.join('fourfive', 'four.rom'), 'loremipsum.rom', 'one.rom', path.join('onetwothree', 'one.rom'), path.join('onetwothree', 'three.rom'), path.join('onetwothree', 'two.rom'), 'three.rom', + 'two.rom', 'unknown.rom'], ], [ '7z/*', @@ -922,33 +961,37 @@ describe('extract', () => { test.each([ [ '**/!(header*)/*', - ['0F09A40.rom', 
'3708F2C.rom', '612644F.rom', '65D1206.rom', '92C85C9.rom', 'C01173E.rom', 'KDULVQN.rom', 'before.rom', 'best.rom', 'empty.rom', 'five.rom', 'fizzbuzz.nes', 'foobar.lnx', 'four.rom', path.join('fourfive', 'five.rom'), path.join('fourfive', 'four.rom'), 'loremipsum.rom', 'one.rom', path.join('onetwothree', 'one.rom'), path.join('onetwothree', 'three.rom'), path.join('onetwothree', 'two.rom'), 'three.rom', 'two.rom', 'unknown.rom'], - ['patchable/0F09A40.rom', 'patchable/3708F2C.rom', 'patchable/612644F.rom', 'patchable/65D1206.rom', 'patchable/92C85C9.rom', 'patchable/C01173E.rom', 'patchable/KDULVQN.rom', 'patchable/before.rom', 'patchable/best.gz', 'raw/empty.rom', 'raw/fizzbuzz.nes', 'raw/foobar.lnx', 'raw/loremipsum.rom', 'raw/one.rom', 'raw/three.rom', 'raw/two.rom', 'raw/unknown.rom'], + ['0F09A40.rom', '2048.rom', '3708F2C.rom', '4096.rom', '612644F.rom', '65D1206.rom', '92C85C9.rom', 'C01173E.rom', + path.join('CD-ROM', 'CD-ROM (Track 1).bin'), path.join('CD-ROM', 'CD-ROM (Track 2).bin'), path.join('CD-ROM', 'CD-ROM (Track 3).bin'), path.join('CD-ROM', 'CD-ROM.cue'), + path.join('GD-ROM', 'GD-ROM.gdi'), path.join('GD-ROM', 'track01.bin'), path.join('GD-ROM', 'track02.raw'), path.join('GD-ROM', 'track03.bin'), path.join('GD-ROM', 'track04.bin'), + 'KDULVQN.rom', 'UMD.iso', 'before.rom', 'best.rom', 'empty.rom', 'five.rom', 'fizzbuzz.nes', 'foobar.lnx', 'four.rom', path.join('fourfive', 'five.rom'), path.join('fourfive', 'four.rom'), 'loremipsum.rom', 'one.rom', path.join('onetwothree', 'one.rom'), path.join('onetwothree', 'three.rom'), path.join('onetwothree', 'two.rom'), 'three.rom', + 'two.rom', 'unknown.rom'], + ['discs/CD-ROM (Track 1).bin', 'discs/CD-ROM (Track 2).bin', 'discs/CD-ROM (Track 3).bin', 'discs/CD-ROM.cue', 'discs/GD-ROM.gdi', 'discs/UMD.iso', 'discs/track01.bin', 'discs/track02.raw', 'discs/track03.bin', 'discs/track04.bin', 'patchable/0F09A40.rom', 'patchable/3708F2C.rom', 'patchable/612644F.rom', 'patchable/65D1206.rom', 
'patchable/92C85C9.rom', 'patchable/C01173E.rom', 'patchable/KDULVQN.rom', 'patchable/before.rom', 'raw/empty.rom', 'raw/five.rom', 'raw/fizzbuzz.nes', 'raw/foobar.lnx', 'raw/four.rom', 'raw/loremipsum.rom', 'raw/one.rom', 'raw/three.rom', 'raw/two.rom', 'raw/unknown.rom'], ], [ '7z/*', ['fizzbuzz.nes', 'foobar.lnx', 'loremipsum.rom', path.join('onetwothree', 'one.rom'), path.join('onetwothree', 'three.rom'), path.join('onetwothree', 'two.rom'), 'unknown.rom'], - ['7z/fizzbuzz.7z', '7z/foobar.7z', '7z/loremipsum.7z', '7z/onetwothree.7z', '7z/unknown.7z'], + [], ], [ 'rar/*', ['fizzbuzz.nes', 'foobar.lnx', 'loremipsum.rom', path.join('onetwothree', 'one.rom'), path.join('onetwothree', 'three.rom'), path.join('onetwothree', 'two.rom'), 'unknown.rom'], - ['rar/fizzbuzz.rar', 'rar/foobar.rar', 'rar/loremipsum.rar', 'rar/onetwothree.rar', 'rar/unknown.rar'], + [], ], [ 'raw/*', ['empty.rom', 'five.rom', 'fizzbuzz.nes', 'foobar.lnx', 'four.rom', 'loremipsum.rom', 'one.rom', 'three.rom', 'two.rom', 'unknown.rom'], - ['raw/empty.rom', 'raw/fizzbuzz.nes', 'raw/foobar.lnx', 'raw/loremipsum.rom', 'raw/one.rom', 'raw/three.rom', 'raw/two.rom', 'raw/unknown.rom'], + ['raw/empty.rom', 'raw/five.rom', 'raw/fizzbuzz.nes', 'raw/foobar.lnx', 'raw/four.rom', 'raw/loremipsum.rom', 'raw/one.rom', 'raw/three.rom', 'raw/two.rom', 'raw/unknown.rom'], ], [ 'tar/*', ['fizzbuzz.nes', 'foobar.lnx', 'loremipsum.rom', path.join('onetwothree', 'one.rom'), path.join('onetwothree', 'three.rom'), path.join('onetwothree', 'two.rom'), 'unknown.rom'], - ['tar/fizzbuzz.tar.gz', 'tar/foobar.tar.gz', 'tar/loremipsum.tar.gz', 'tar/onetwothree.tar.gz', 'tar/unknown.tar.gz'], + [], ], [ 'zip/*', ['fizzbuzz.nes', 'foobar.lnx', path.join('fourfive', 'five.rom'), path.join('fourfive', 'four.rom'), 'loremipsum.rom', path.join('onetwothree', 'one.rom'), path.join('onetwothree', 'three.rom'), path.join('onetwothree', 'two.rom'), 'unknown.rom'], - ['zip/fizzbuzz.zip', 'zip/foobar.zip', 'zip/loremipsum.zip', 
'zip/onetwothree.zip', 'zip/unknown.zip'], + [], ], ])('should move, extract, and test: %s', async (inputGlob, expectedOutputPaths, expectedDeletedInputPaths) => { await copyFixturesToTemp(async (inputTemp, outputTemp) => { @@ -982,12 +1025,10 @@ describe('extract', () => { // File wasn't deleted, ensure it wasn't touched expect(statsAfter).toEqual(statsBefore); }); - romFilesBefore + expect(romFilesBefore .filter(([inputFile]) => !romFilesAfter.has(inputFile)) - .forEach(([inputFile]) => { - // File was deleted, ensure it was expected - expect(expectedDeletedInputPaths).toContain(inputFile.replace(/[\\/]/g, '/')); - }); + .map(([inputFile]) => inputFile.replace(/[\\/]/g, '/'))) + .toIncludeSameMembers(expectedDeletedInputPaths); }); }); }); @@ -1217,7 +1258,8 @@ describe('raw', () => { const outputFiles = await candidateWriter(options, inputTemp, inputGlob, 'patches', outputTemp); const writtenRomsAndCrcs = (await Promise.all(outputFiles - .map(async ([outputPath]) => FileFactory.filesFrom(path.join(outputTemp, outputPath))))) + .map(async ([outputPath]) => new FileFactory(new FileCache()) + .filesFrom(path.join(outputTemp, outputPath))))) .flat() .map((entry) => [entry.toString().replace(outputTemp + path.sep, ''), entry.getCrc32() ?? 
'']) .sort((a, b) => a[0].localeCompare(b[0])); @@ -1228,7 +1270,11 @@ describe('raw', () => { test.each([ [ '**/!(header*)/*', - ['0F09A40.rom', '3708F2C.rom', '612644F.rom', '65D1206.rom', '92C85C9.rom', 'C01173E.rom', 'KDULVQN.rom', 'before.rom', 'best.gz', 'empty.rom', 'five.rom', 'fizzbuzz.nes', 'foobar.lnx', 'four.rom', 'fourfive.zip', 'loremipsum.rom', 'one.rom', 'onetwothree.zip', 'three.rom', 'two.rom', 'unknown.rom'], + ['0F09A40.rom', '2048.chd', '3708F2C.rom', '4096.chd', '612644F.rom', '65D1206.rom', '92C85C9.rom', 'C01173E.rom', + 'CD-ROM.chd', + 'GD-ROM.chd', + 'KDULVQN.rom', 'UMD.iso', 'before.rom', 'best.gz', 'empty.rom', 'five.rom', 'fizzbuzz.nes', 'foobar.lnx', 'four.rom', 'fourfive.zip', 'loremipsum.rom', 'one.rom', 'onetwothree.zip', 'three.rom', + 'two.rom', 'unknown.rom'], ], [ '7z/*', @@ -1278,8 +1324,12 @@ describe('raw', () => { test.each([ [ '**/!(header*)/*', - ['0F09A40.rom', '3708F2C.rom', '612644F.rom', '65D1206.rom', '92C85C9.rom', 'C01173E.rom', 'KDULVQN.rom', 'before.rom', 'best.gz', 'empty.rom', 'five.rom', 'fizzbuzz.nes', 'foobar.lnx', 'four.rom', 'fourfive.zip', 'loremipsum.rom', 'one.rom', 'onetwothree.zip', 'three.rom', 'two.rom', 'unknown.rom'], - ['patchable/0F09A40.rom', 'patchable/3708F2C.rom', 'patchable/612644F.rom', 'patchable/65D1206.rom', 'patchable/92C85C9.rom', 'patchable/C01173E.rom', 'patchable/KDULVQN.rom', 'patchable/before.rom', 'patchable/best.gz', 'raw/empty.rom', 'raw/fizzbuzz.nes', 'raw/foobar.lnx', 'raw/loremipsum.rom', 'raw/one.rom', 'raw/three.rom', 'raw/two.rom', 'raw/unknown.rom'], + ['0F09A40.rom', '2048.chd', '3708F2C.rom', '4096.chd', '612644F.rom', '65D1206.rom', '92C85C9.rom', 'C01173E.rom', + 'CD-ROM.chd', + 'GD-ROM.chd', + 'KDULVQN.rom', 'UMD.iso', 'before.rom', 'best.gz', 'empty.rom', 'five.rom', 'fizzbuzz.nes', 'foobar.lnx', 'four.rom', 'fourfive.zip', 'loremipsum.rom', 'one.rom', 'onetwothree.zip', 'three.rom', + 'two.rom', 'unknown.rom'], + ['chd/2048.chd', 'chd/4096.chd', 'chd/CD-ROM.chd', 
'chd/GD-ROM.chd', 'discs/UMD.iso', 'patchable/0F09A40.rom', 'patchable/3708F2C.rom', 'patchable/612644F.rom', 'patchable/65D1206.rom', 'patchable/92C85C9.rom', 'patchable/C01173E.rom', 'patchable/KDULVQN.rom', 'patchable/before.rom', 'patchable/best.gz', 'raw/empty.rom', 'raw/five.rom', 'raw/fizzbuzz.nes', 'raw/foobar.lnx', 'raw/four.rom', 'raw/loremipsum.rom', 'raw/one.rom', 'raw/three.rom', 'raw/two.rom', 'raw/unknown.rom', 'zip/fourfive.zip', 'zip/onetwothree.zip'], ], [ '7z/*', @@ -1294,7 +1344,7 @@ describe('raw', () => { [ 'raw/*', ['empty.rom', 'five.rom', 'fizzbuzz.nes', 'foobar.lnx', 'four.rom', 'loremipsum.rom', 'one.rom', 'three.rom', 'two.rom', 'unknown.rom'], - ['raw/empty.rom', 'raw/fizzbuzz.nes', 'raw/foobar.lnx', 'raw/loremipsum.rom', 'raw/one.rom', 'raw/three.rom', 'raw/two.rom', 'raw/unknown.rom'], + ['raw/empty.rom', 'raw/five.rom', 'raw/fizzbuzz.nes', 'raw/foobar.lnx', 'raw/four.rom', 'raw/loremipsum.rom', 'raw/one.rom', 'raw/three.rom', 'raw/two.rom', 'raw/unknown.rom'], ], [ 'tar/*', @@ -1304,7 +1354,7 @@ describe('raw', () => { [ 'zip/*', ['fizzbuzz.zip', 'foobar.zip', 'fourfive.zip', 'loremipsum.zip', 'onetwothree.zip', 'unknown.zip'], - ['zip/fizzbuzz.zip', 'zip/foobar.zip', 'zip/loremipsum.zip', 'zip/onetwothree.zip', 'zip/unknown.zip'], + ['zip/fizzbuzz.zip', 'zip/foobar.zip', 'zip/fourfive.zip', 'zip/loremipsum.zip', 'zip/onetwothree.zip', 'zip/unknown.zip'], ], ])('should move raw and test: %s', async (inputGlob, expectedOutputPaths, expectedDeletedInputPaths) => { await copyFixturesToTemp(async (inputTemp, outputTemp) => { @@ -1335,12 +1385,10 @@ describe('raw', () => { // File wasn't deleted, ensure it wasn't touched expect(statsAfter).toEqual(statsBefore); }); - romFilesBefore + expect(romFilesBefore .filter(([inputFile]) => !romFilesAfter.has(inputFile)) - .forEach(([inputFile]) => { - // File was deleted, ensure it was expected - expect(expectedDeletedInputPaths).toContain(inputFile.replace(/[\\/]/g, '/')); - }); + 
.map(([inputFile]) => inputFile.replace(/[\\/]/g, '/'))) + .toIncludeSameMembers(expectedDeletedInputPaths); }); }); }); diff --git a/test/modules/datFilter.test.ts b/test/modules/datFilter.test.ts index d6f7c16c0..8cf21a063 100644 --- a/test/modules/datFilter.test.ts +++ b/test/modules/datFilter.test.ts @@ -11,13 +11,13 @@ function buildDATFilter(options: OptionsProps = {}): DATFilter { return new DATFilter(new Options(options), new ProgressBarFake()); } -async function expectFilteredDAT( +function expectFilteredDAT( options: OptionsProps, gamesArr: Game[][], expectedGameCount: number, -): Promise { +): void { const dat = new LogiqxDAT(new Header(), gamesArr.flat()); - const filteredDat = await buildDATFilter(options).filter(dat); + const filteredDat = buildDATFilter(options).filter(dat); expect(filteredDat.getGames().length).toEqual(expectedGameCount); } @@ -74,43 +74,43 @@ function buildGameWithRegionLanguage( return games; } -it('should return nothing if no parents exist', async () => { - await expectFilteredDAT({}, [], 0); +it('should return nothing if no parents exist', () => { + expectFilteredDAT({}, [], 0); }); -it('should return nothing if no parent has release candidates', async () => { - await expectFilteredDAT({}, [ +it('should return nothing if no parent has release candidates', () => { + expectFilteredDAT({}, [ buildGameWithRegionLanguage(['one', 'two', 'three'], [], []), ], 0); }); describe('filter', () => { - it('should return all candidates if no filter', async () => { - await expectFilteredDAT({}, [ + it('should return all candidates if no filter', () => { + expectFilteredDAT({}, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), ], 1); - await expectFilteredDAT({}, [ + expectFilteredDAT({}, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'JPN', 'JA'), ], 2); }); - it('should return no candidates if none given', async () => { - await expectFilteredDAT({ + it('should return no candidates if none given', () => { 
+ expectFilteredDAT({ filterLanguage: [], }, [], 0); - await expectFilteredDAT({ + expectFilteredDAT({ filterLanguage: ['ZH'], }, [], 0); - await expectFilteredDAT({ + expectFilteredDAT({ filterLanguage: ['ZH', 'DE'], }, [], 0); }); - it('should not re-elect a new parent if not filtered out', async () => { + it('should not re-elect a new parent if not filtered out', () => { const options = new Options({ filterRegion: ['EUR'], }); @@ -129,7 +129,7 @@ describe('filter', () => { const dat = new LogiqxDAT(new Header(), [parent, ...children]); expect(dat.getParents()).toHaveLength(1); - const filteredDat = await new DATFilter(options, new ProgressBarFake()).filter(dat); + const filteredDat = new DATFilter(options, new ProgressBarFake()).filter(dat); expect(filteredDat.getParents()).toHaveLength(1); expect(filteredDat.getGames().map((game) => game.getName())).toEqual([ @@ -143,7 +143,7 @@ describe('filter', () => { .every((game) => game.getParent() === 'Legend of Zelda, The (Europe) (Rev 1)')).toEqual(true); }); - it('should not leave children abandoned', async () => { + it('should not leave children abandoned', () => { const options = new Options({ filterRegion: ['USA', 'WORLD'], }); @@ -162,7 +162,7 @@ describe('filter', () => { const dat = new LogiqxDAT(new Header(), [parent, ...children]); expect(dat.getParents()).toHaveLength(1); - const filteredDat = await new DATFilter(options, new ProgressBarFake()).filter(dat); + const filteredDat = new DATFilter(options, new ProgressBarFake()).filter(dat); expect(filteredDat.getParents()).toHaveLength(1); expect(filteredDat.getGames().map((game) => game.getName())).toEqual([ @@ -183,8 +183,8 @@ describe('filter', () => { 'ONE', 'four', '[xyz]', - ])('should return no candidates if none matching: %s', async (filterRegex) => { - await expectFilteredDAT({ filterRegex }, [ + ])('should return no candidates if none matching: %s', (filterRegex) => { + expectFilteredDAT({ filterRegex }, [ buildGameWithRegionLanguage('one'), 
buildGameWithRegionLanguage('two'), buildGameWithRegionLanguage('three'), @@ -195,8 +195,8 @@ describe('filter', () => { '/ONE/i', 'two', 'o$', - ])('should return one candidate if one matching: %s', async (filterRegex) => { - await expectFilteredDAT({ filterRegex }, [ + ])('should return one candidate if one matching: %s', (filterRegex) => { + expectFilteredDAT({ filterRegex }, [ buildGameWithRegionLanguage('one'), buildGameWithRegionLanguage('two'), buildGameWithRegionLanguage('three'), @@ -206,8 +206,8 @@ describe('filter', () => { test.each([ '(one|two|three)', '[aeiou]', - ])('should return all candidates if all matching: %s', async (filterRegex) => { - await expectFilteredDAT({ filterRegex }, [ + ])('should return all candidates if all matching: %s', (filterRegex) => { + expectFilteredDAT({ filterRegex }, [ buildGameWithRegionLanguage('one'), buildGameWithRegionLanguage('two'), buildGameWithRegionLanguage('three'), @@ -219,8 +219,8 @@ describe('filter', () => { test.each([ '(one|two|three)', '[aeiou]', - ])('should return no candidates if all matching: %s', async (filterRegexExclude) => { - await expectFilteredDAT({ filterRegexExclude }, [ + ])('should return no candidates if all matching: %s', (filterRegexExclude) => { + expectFilteredDAT({ filterRegexExclude }, [ buildGameWithRegionLanguage('one'), buildGameWithRegionLanguage('two'), buildGameWithRegionLanguage('three'), @@ -231,8 +231,8 @@ describe('filter', () => { '(two|three)', 't', '/E/i', - ])('should return one candidate if two matching: %s', async (filterRegexExclude) => { - await expectFilteredDAT({ filterRegexExclude }, [ + ])('should return one candidate if two matching: %s', (filterRegexExclude) => { + expectFilteredDAT({ filterRegexExclude }, [ buildGameWithRegionLanguage('one'), buildGameWithRegionLanguage('two'), buildGameWithRegionLanguage('three'), @@ -243,8 +243,8 @@ describe('filter', () => { 'ONE', 'four', '[xyz]', - ])('should return all candidates if none matching: %s', async 
(filterRegexExclude) => { - await expectFilteredDAT({ filterRegexExclude }, [ + ])('should return all candidates if none matching: %s', (filterRegexExclude) => { + expectFilteredDAT({ filterRegexExclude }, [ buildGameWithRegionLanguage('one'), buildGameWithRegionLanguage('two'), buildGameWithRegionLanguage('three'), @@ -253,26 +253,26 @@ describe('filter', () => { }); describe('language filter', () => { - it('should return no candidates if none matching', async () => { - await expectFilteredDAT({ + it('should return no candidates if none matching', () => { + expectFilteredDAT({ filterLanguage: ['ZH'], }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), ], 0); - await expectFilteredDAT({ + expectFilteredDAT({ filterLanguage: ['ZH'], }, [ buildGameWithRegionLanguage('one', 'EUR', undefined), ], 0); - await expectFilteredDAT({ + expectFilteredDAT({ filterLanguage: ['ZH'], }, [ buildGameWithRegionLanguage('one (En,Fr,De)', 'EUR', undefined), ], 0); - await expectFilteredDAT({ + expectFilteredDAT({ filterLanguage: ['ZH'], }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), @@ -281,8 +281,8 @@ describe('filter', () => { ], 0); }); - it('should return some candidates if some matching', async () => { - await expectFilteredDAT({ + it('should return some candidates if some matching', () => { + expectFilteredDAT({ filterLanguage: ['ZH'], }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), @@ -290,7 +290,7 @@ describe('filter', () => { buildGameWithRegionLanguage('three', 'EUR', ['DE', 'IT', 'EN']), ], 1); - await expectFilteredDAT({ + expectFilteredDAT({ filterLanguage: ['ZH'], }, [ buildGameWithRegionLanguage('one', 'USA', undefined), @@ -298,7 +298,7 @@ describe('filter', () => { buildGameWithRegionLanguage('three', 'EUR', undefined), ], 1); - await expectFilteredDAT({ + expectFilteredDAT({ filterLanguage: ['EN', 'ZH'], }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), @@ -307,40 +307,40 @@ describe('filter', () => { ], 2); }); - it('should return all candidates 
if all matching', async () => { - await expectFilteredDAT({ + it('should return all candidates if all matching', () => { + expectFilteredDAT({ filterLanguage: ['EN'], }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), ], 1); - await expectFilteredDAT({ + expectFilteredDAT({ filterLanguage: ['EN'], }, [ buildGameWithRegionLanguage('one', 'EUR', undefined), ], 1); - await expectFilteredDAT({ + expectFilteredDAT({ filterLanguage: ['EN'], }, [ buildGameWithRegionLanguage('one (En,Fr,De)', 'EUR', undefined), ], 1); - await expectFilteredDAT({ + expectFilteredDAT({ filterLanguage: ['EN', 'ZH'], }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'CHN', 'ZH'), ], 2); - await expectFilteredDAT({ + expectFilteredDAT({ filterLanguage: ['EN', 'ZH'], }, [ buildGameWithRegionLanguage('one', 'USA', undefined), buildGameWithRegionLanguage('two', 'CHN', undefined), ], 2); - await expectFilteredDAT({ + expectFilteredDAT({ filterLanguage: ['EN', 'JA'], }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), @@ -351,14 +351,14 @@ describe('filter', () => { }); describe('region filter', () => { - it('should return no candidates if none matching', async () => { - await expectFilteredDAT({ + it('should return no candidates if none matching', () => { + expectFilteredDAT({ filterRegion: ['EUR'], }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), ], 0); - await expectFilteredDAT({ + expectFilteredDAT({ filterRegion: ['CHN'], }, [ buildGameWithRegionLanguage('one', ['USA', 'CAN'], 'EN'), @@ -367,8 +367,8 @@ describe('filter', () => { ], 0); }); - it('should return some candidates if some matching', async () => { - await expectFilteredDAT({ + it('should return some candidates if some matching', () => { + expectFilteredDAT({ filterRegion: ['USA'], }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), @@ -376,7 +376,7 @@ describe('filter', () => { buildGameWithRegionLanguage('three', 'EUR', ['DE', 'IT', 'EN']), ], 1); - await expectFilteredDAT({ + 
expectFilteredDAT({ filterRegion: ['CAN', 'ASI'], }, [ buildGameWithRegionLanguage('one', ['USA', 'CAN'], 'EN'), @@ -385,21 +385,21 @@ describe('filter', () => { ], 4); }); - it('should return all candidates if all matching', async () => { - await expectFilteredDAT({ + it('should return all candidates if all matching', () => { + expectFilteredDAT({ filterRegion: ['USA'], }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), ], 1); - await expectFilteredDAT({ + expectFilteredDAT({ filterRegion: ['USA', 'CHN'], }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'CHN', 'ZH'), ], 2); - await expectFilteredDAT({ + expectFilteredDAT({ filterRegion: ['USA', 'JPN'], }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), @@ -410,128 +410,128 @@ describe('filter', () => { }); describe('bios', () => { - it('option is false', async () => { + it('option is false', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN', { bios: 'no' }), buildGameWithRegionLanguage('two', 'USA', 'EN', { bios: 'yes' }), buildGameWithRegionLanguage('three', 'USA', 'EN', { bios: 'no' }), ]; - await expectFilteredDAT({ noBios: false }, parentsToCandidates, 3); - await expectFilteredDAT({ onlyBios: false }, parentsToCandidates, 3); + expectFilteredDAT({ noBios: false }, parentsToCandidates, 3); + expectFilteredDAT({ onlyBios: false }, parentsToCandidates, 3); }); - it('all games are BIOS', async () => { + it('all games are BIOS', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN', { bios: 'yes' }), buildGameWithRegionLanguage('two [BIOS]', 'USA', 'EN', { bios: 'no' }), ]; - await expectFilteredDAT({ noBios: true }, parentsToCandidates, 0); - await expectFilteredDAT({ onlyBios: true }, parentsToCandidates, 2); + expectFilteredDAT({ noBios: true }, parentsToCandidates, 0); + expectFilteredDAT({ onlyBios: true }, parentsToCandidates, 2); }); - it('some games are BIOS', async () => { + it('some games are 
BIOS', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN', { bios: 'no' }), buildGameWithRegionLanguage('two', 'USA', 'EN', { bios: 'yes' }), buildGameWithRegionLanguage('three', 'USA', 'EN', { bios: 'no' }), ]; - await expectFilteredDAT({ noBios: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyBios: true }, parentsToCandidates, 1); + expectFilteredDAT({ noBios: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyBios: true }, parentsToCandidates, 1); }); - it('no games are BIOS', async () => { + it('no games are BIOS', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN', { bios: 'no' }), buildGameWithRegionLanguage('two', 'USA', 'EN', { bios: 'no' }), ]; - await expectFilteredDAT({ noBios: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyBios: true }, parentsToCandidates, 0); + expectFilteredDAT({ noBios: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyBios: true }, parentsToCandidates, 0); }); }); describe('device', () => { - it('option is false', async () => { + it('option is false', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN', { device: 'no' }), buildGameWithRegionLanguage('two', 'USA', 'EN', { device: 'yes' }), buildGameWithRegionLanguage('three', 'USA', 'EN', { device: 'no' }), ]; - await expectFilteredDAT({ noDevice: false }, parentsToCandidates, 3); - await expectFilteredDAT({ onlyDevice: false }, parentsToCandidates, 3); + expectFilteredDAT({ noDevice: false }, parentsToCandidates, 3); + expectFilteredDAT({ onlyDevice: false }, parentsToCandidates, 3); }); - it('all games are device', async () => { + it('all games are device', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN', { device: 'yes' }), buildGameWithRegionLanguage('two', 'USA', 'EN', { device: 'yes' }), ]; - await expectFilteredDAT({ noDevice: true }, parentsToCandidates, 0); - await expectFilteredDAT({ 
onlyDevice: true }, parentsToCandidates, 2); + expectFilteredDAT({ noDevice: true }, parentsToCandidates, 0); + expectFilteredDAT({ onlyDevice: true }, parentsToCandidates, 2); }); - it('some games are device', async () => { + it('some games are device', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN', { device: 'no' }), buildGameWithRegionLanguage('two', 'USA', 'EN', { device: 'yes' }), buildGameWithRegionLanguage('three', 'USA', 'EN', { device: 'no' }), ]; - await expectFilteredDAT({ noDevice: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyDevice: true }, parentsToCandidates, 1); + expectFilteredDAT({ noDevice: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyDevice: true }, parentsToCandidates, 1); }); - it('no games are device', async () => { + it('no games are device', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN', { device: 'no' }), buildGameWithRegionLanguage('two', 'USA', 'EN', { device: 'no' }), ]; - await expectFilteredDAT({ noDevice: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyDevice: true }, parentsToCandidates, 0); + expectFilteredDAT({ noDevice: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyDevice: true }, parentsToCandidates, 0); }); }); describe('unlicensed', () => { - it('option is false', async () => { + it('option is false', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two (Unl)', 'USA', 'EN'), buildGameWithRegionLanguage('three (Unlicensed)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noUnlicensed: false }, parentsToCandidates, 3); - await expectFilteredDAT({ onlyUnlicensed: false }, parentsToCandidates, 3); + expectFilteredDAT({ noUnlicensed: false }, parentsToCandidates, 3); + expectFilteredDAT({ onlyUnlicensed: false }, parentsToCandidates, 3); }); - it('all games are unlicensed', async () => { + it('all games are unlicensed', () => { 
const parentsToCandidates = [ buildGameWithRegionLanguage('one (Unl)', 'USA', 'EN'), buildGameWithRegionLanguage('two (Unlicensed)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noUnlicensed: true }, parentsToCandidates, 0); - await expectFilteredDAT({ onlyUnlicensed: true }, parentsToCandidates, 2); + expectFilteredDAT({ noUnlicensed: true }, parentsToCandidates, 0); + expectFilteredDAT({ onlyUnlicensed: true }, parentsToCandidates, 2); }); - it('some games are unlicensed', async () => { + it('some games are unlicensed', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Unlicensed)', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), buildGameWithRegionLanguage('three (Unl)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noUnlicensed: true }, parentsToCandidates, 1); - await expectFilteredDAT({ onlyUnlicensed: true }, parentsToCandidates, 2); + expectFilteredDAT({ noUnlicensed: true }, parentsToCandidates, 1); + expectFilteredDAT({ onlyUnlicensed: true }, parentsToCandidates, 2); }); - it('no games are unlicensed', async () => { + it('no games are unlicensed', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), ]; - await expectFilteredDAT({ noUnlicensed: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyUnlicensed: true }, parentsToCandidates, 0); + expectFilteredDAT({ noUnlicensed: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyUnlicensed: true }, parentsToCandidates, 0); }); }); describe('only retail', () => { - it('should return all candidates when option is false', async () => { - await expectFilteredDAT({ onlyRetail: false }, [ + it('should return all candidates when option is false', () => { + expectFilteredDAT({ onlyRetail: false }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two (Aftermarket)', 'USA', 'EN'), buildGameWithRegionLanguage('three [b]', 'USA', 'EN'), @@ -545,8 +545,8 
@@ describe('filter', () => { ], 10); }); - it('should return no candidates if none matching', async () => { - await expectFilteredDAT({ onlyRetail: true }, [ + it('should return no candidates if none matching', () => { + expectFilteredDAT({ onlyRetail: true }, [ buildGameWithRegionLanguage('two (Aftermarket)', 'USA', 'EN'), buildGameWithRegionLanguage('three [b]', 'USA', 'EN'), buildGameWithRegionLanguage('four (Beta)', 'USA', 'EN'), @@ -559,8 +559,8 @@ describe('filter', () => { ], 0); }); - it('should return some candidates if some matching', async () => { - await expectFilteredDAT({ onlyRetail: true }, [ + it('should return some candidates if some matching', () => { + expectFilteredDAT({ onlyRetail: true }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two (Aftermarket)', 'USA', 'EN'), buildGameWithRegionLanguage('three [b]', 'USA', 'EN'), @@ -575,8 +575,8 @@ describe('filter', () => { ], 2); }); - it('should return all candidates if all matching', async () => { - await expectFilteredDAT({ onlyRetail: true }, [ + it('should return all candidates if all matching', () => { + expectFilteredDAT({ onlyRetail: true }, [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), ], 2); @@ -584,401 +584,401 @@ describe('filter', () => { }); describe('debug', () => { - it('option is false', async () => { + it('option is false', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two (Debug)', 'USA', 'EN'), buildGameWithRegionLanguage('three (Debug Version)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noDebug: false }, parentsToCandidates, 3); - await expectFilteredDAT({ onlyDebug: false }, parentsToCandidates, 3); + expectFilteredDAT({ noDebug: false }, parentsToCandidates, 3); + expectFilteredDAT({ onlyDebug: false }, parentsToCandidates, 3); }); - it('all games are debug', async () => { + it('all games are debug', () => { const 
parentsToCandidates = [ buildGameWithRegionLanguage('one (Debug)', 'USA', 'EN'), buildGameWithRegionLanguage('two (Debug Version)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noDebug: true }, parentsToCandidates, 0); - await expectFilteredDAT({ onlyDebug: true }, parentsToCandidates, 2); + expectFilteredDAT({ noDebug: true }, parentsToCandidates, 0); + expectFilteredDAT({ onlyDebug: true }, parentsToCandidates, 2); }); - it('some games are debug', async () => { + it('some games are debug', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Debug Version)', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), buildGameWithRegionLanguage('three (Debug)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noDebug: true }, parentsToCandidates, 1); - await expectFilteredDAT({ onlyDebug: true }, parentsToCandidates, 2); + expectFilteredDAT({ noDebug: true }, parentsToCandidates, 1); + expectFilteredDAT({ onlyDebug: true }, parentsToCandidates, 2); }); - it('no games are debug', async () => { + it('no games are debug', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), ]; - await expectFilteredDAT({ noDebug: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyDebug: true }, parentsToCandidates, 0); + expectFilteredDAT({ noDebug: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyDebug: true }, parentsToCandidates, 0); }); }); describe('demo', () => { - it('option is false', async () => { + it('option is false', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two (Demo)', 'USA', 'EN'), buildGameWithRegionLanguage('three (Demo 2000)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noDemo: false }, parentsToCandidates, 3); - await expectFilteredDAT({ onlyDemo: false }, parentsToCandidates, 3); + expectFilteredDAT({ noDemo: false }, parentsToCandidates, 3); + expectFilteredDAT({ 
onlyDemo: false }, parentsToCandidates, 3); }); - it('all games are demo', async () => { + it('all games are demo', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Demo)', 'USA', 'EN'), buildGameWithRegionLanguage('two (Demo 2000)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noDemo: true }, parentsToCandidates, 0); - await expectFilteredDAT({ onlyDemo: true }, parentsToCandidates, 2); + expectFilteredDAT({ noDemo: true }, parentsToCandidates, 0); + expectFilteredDAT({ onlyDemo: true }, parentsToCandidates, 2); }); - it('some games are demo', async () => { + it('some games are demo', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Demo 2000)', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), buildGameWithRegionLanguage('three (Demo)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noDemo: true }, parentsToCandidates, 1); - await expectFilteredDAT({ onlyDemo: true }, parentsToCandidates, 2); + expectFilteredDAT({ noDemo: true }, parentsToCandidates, 1); + expectFilteredDAT({ onlyDemo: true }, parentsToCandidates, 2); }); - it('no games are demo', async () => { + it('no games are demo', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), ]; - await expectFilteredDAT({ noDemo: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyDemo: true }, parentsToCandidates, 0); + expectFilteredDAT({ noDemo: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyDemo: true }, parentsToCandidates, 0); }); }); describe('beta', () => { - it('option is false', async () => { + it('option is false', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two (Beta)', 'USA', 'EN'), buildGameWithRegionLanguage('three (Beta v1.0)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noBeta: false }, parentsToCandidates, 3); - await expectFilteredDAT({ onlyBeta: false }, 
parentsToCandidates, 3); + expectFilteredDAT({ noBeta: false }, parentsToCandidates, 3); + expectFilteredDAT({ onlyBeta: false }, parentsToCandidates, 3); }); - it('all games are beta', async () => { + it('all games are beta', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Beta)', 'USA', 'EN'), buildGameWithRegionLanguage('two (Beta v1.0)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noBeta: true }, parentsToCandidates, 0); - await expectFilteredDAT({ onlyBeta: true }, parentsToCandidates, 2); + expectFilteredDAT({ noBeta: true }, parentsToCandidates, 0); + expectFilteredDAT({ onlyBeta: true }, parentsToCandidates, 2); }); - it('some games are beta', async () => { + it('some games are beta', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Beta v1.0)', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), buildGameWithRegionLanguage('three (Beta)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noBeta: true }, parentsToCandidates, 1); - await expectFilteredDAT({ onlyBeta: true }, parentsToCandidates, 2); + expectFilteredDAT({ noBeta: true }, parentsToCandidates, 1); + expectFilteredDAT({ onlyBeta: true }, parentsToCandidates, 2); }); - it('no games are beta', async () => { + it('no games are beta', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), ]; - await expectFilteredDAT({ noBeta: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyBeta: true }, parentsToCandidates, 0); + expectFilteredDAT({ noBeta: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyBeta: true }, parentsToCandidates, 0); }); }); describe('sample', () => { - it('option is false', async () => { + it('option is false', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two (Sample)', 'USA', 'EN'), buildGameWithRegionLanguage('three (Sample Copy)', 'USA', 'EN'), ]; - await 
expectFilteredDAT({ noSample: false }, parentsToCandidates, 3); - await expectFilteredDAT({ onlySample: false }, parentsToCandidates, 3); + expectFilteredDAT({ noSample: false }, parentsToCandidates, 3); + expectFilteredDAT({ onlySample: false }, parentsToCandidates, 3); }); - it('all games are sample', async () => { + it('all games are sample', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Sample)', 'USA', 'EN'), buildGameWithRegionLanguage('two (Sample Copy)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noSample: true }, parentsToCandidates, 0); - await expectFilteredDAT({ onlySample: true }, parentsToCandidates, 2); + expectFilteredDAT({ noSample: true }, parentsToCandidates, 0); + expectFilteredDAT({ onlySample: true }, parentsToCandidates, 2); }); - it('some games are sample', async () => { + it('some games are sample', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Sample Copy)', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), buildGameWithRegionLanguage('three (Sample)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noSample: true }, parentsToCandidates, 1); - await expectFilteredDAT({ onlySample: true }, parentsToCandidates, 2); + expectFilteredDAT({ noSample: true }, parentsToCandidates, 1); + expectFilteredDAT({ onlySample: true }, parentsToCandidates, 2); }); - it('no games are sample', async () => { + it('no games are sample', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), ]; - await expectFilteredDAT({ noSample: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlySample: true }, parentsToCandidates, 0); + expectFilteredDAT({ noSample: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlySample: true }, parentsToCandidates, 0); }); }); describe('prototype', () => { - it('option is false', async () => { + it('option is false', () => { const parentsToCandidates = [ 
buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two (Proto)', 'USA', 'EN'), buildGameWithRegionLanguage('three (Prototype)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noPrototype: false }, parentsToCandidates, 3); - await expectFilteredDAT({ onlyPrototype: false }, parentsToCandidates, 3); + expectFilteredDAT({ noPrototype: false }, parentsToCandidates, 3); + expectFilteredDAT({ onlyPrototype: false }, parentsToCandidates, 3); }); - it('all games are prototype', async () => { + it('all games are prototype', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Proto)', 'USA', 'EN'), buildGameWithRegionLanguage('two (Prototype)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noPrototype: true }, parentsToCandidates, 0); - await expectFilteredDAT({ onlyPrototype: true }, parentsToCandidates, 2); + expectFilteredDAT({ noPrototype: true }, parentsToCandidates, 0); + expectFilteredDAT({ onlyPrototype: true }, parentsToCandidates, 2); }); - it('some games are prototype', async () => { + it('some games are prototype', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Prototype)', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), buildGameWithRegionLanguage('three (Proto)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noPrototype: true }, parentsToCandidates, 1); - await expectFilteredDAT({ onlyPrototype: true }, parentsToCandidates, 2); + expectFilteredDAT({ noPrototype: true }, parentsToCandidates, 1); + expectFilteredDAT({ onlyPrototype: true }, parentsToCandidates, 2); }); - it('no games are prototype', async () => { + it('no games are prototype', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), ]; - await expectFilteredDAT({ noPrototype: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyPrototype: true }, parentsToCandidates, 0); + expectFilteredDAT({ noPrototype: true }, 
parentsToCandidates, 2); + expectFilteredDAT({ onlyPrototype: true }, parentsToCandidates, 0); }); }); describe('program', () => { - it('option is false', async () => { + it('option is false', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two (Program)', 'USA', 'EN'), buildGameWithRegionLanguage('three (Test Program)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noProgram: false }, parentsToCandidates, 3); - await expectFilteredDAT({ onlyProgram: false }, parentsToCandidates, 3); + expectFilteredDAT({ noProgram: false }, parentsToCandidates, 3); + expectFilteredDAT({ onlyProgram: false }, parentsToCandidates, 3); }); - it('all games are programs', async () => { + it('all games are programs', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Program)', 'USA', 'EN'), buildGameWithRegionLanguage('two (Test Program)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noProgram: true }, parentsToCandidates, 0); - await expectFilteredDAT({ onlyProgram: true }, parentsToCandidates, 2); + expectFilteredDAT({ noProgram: true }, parentsToCandidates, 0); + expectFilteredDAT({ onlyProgram: true }, parentsToCandidates, 2); }); - it('some games are programs', async () => { + it('some games are programs', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Test Program)', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), buildGameWithRegionLanguage('three (Program)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noProgram: true }, parentsToCandidates, 1); - await expectFilteredDAT({ onlyProgram: true }, parentsToCandidates, 2); + expectFilteredDAT({ noProgram: true }, parentsToCandidates, 1); + expectFilteredDAT({ onlyProgram: true }, parentsToCandidates, 2); }); - it('no games are programs', async () => { + it('no games are programs', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 
'EN'), ]; - await expectFilteredDAT({ noProgram: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyProgram: true }, parentsToCandidates, 0); + expectFilteredDAT({ noProgram: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyProgram: true }, parentsToCandidates, 0); }); }); describe('aftermarket', () => { - it('option is false', async () => { + it('option is false', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two (Aftermarket)', 'USA', 'EN'), buildGameWithRegionLanguage('three (Aftermarket Version)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noAftermarket: false }, parentsToCandidates, 3); - await expectFilteredDAT({ onlyAftermarket: false }, parentsToCandidates, 3); + expectFilteredDAT({ noAftermarket: false }, parentsToCandidates, 3); + expectFilteredDAT({ onlyAftermarket: false }, parentsToCandidates, 3); }); - it('all games are aftermarket', async () => { + it('all games are aftermarket', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Aftermarket)', 'USA', 'EN'), buildGameWithRegionLanguage('two (Aftermarket Version)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noAftermarket: true }, parentsToCandidates, 0); - await expectFilteredDAT({ onlyAftermarket: true }, parentsToCandidates, 2); + expectFilteredDAT({ noAftermarket: true }, parentsToCandidates, 0); + expectFilteredDAT({ onlyAftermarket: true }, parentsToCandidates, 2); }); - it('some games are aftermarket', async () => { + it('some games are aftermarket', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Aftermarket Version)', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), buildGameWithRegionLanguage('three (Aftermarket)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noAftermarket: true }, parentsToCandidates, 1); - await expectFilteredDAT({ onlyAftermarket: true }, parentsToCandidates, 2); + expectFilteredDAT({ noAftermarket: true }, 
parentsToCandidates, 1); + expectFilteredDAT({ onlyAftermarket: true }, parentsToCandidates, 2); }); - it('no games are aftermarket', async () => { + it('no games are aftermarket', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), ]; - await expectFilteredDAT({ noAftermarket: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyAftermarket: true }, parentsToCandidates, 0); + expectFilteredDAT({ noAftermarket: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyAftermarket: true }, parentsToCandidates, 0); }); }); describe('homebrew', () => { - it('option is false', async () => { + it('option is false', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two (Homebrew)', 'USA', 'EN'), buildGameWithRegionLanguage('three (Homebrew Edition)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noHomebrew: false }, parentsToCandidates, 3); - await expectFilteredDAT({ onlyHomebrew: false }, parentsToCandidates, 3); + expectFilteredDAT({ noHomebrew: false }, parentsToCandidates, 3); + expectFilteredDAT({ onlyHomebrew: false }, parentsToCandidates, 3); }); - it('all games are homebrew', async () => { + it('all games are homebrew', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Homebrew)', 'USA', 'EN'), buildGameWithRegionLanguage('two (Homebrew Edition)', 'USA', 'EN'), ]; - await expectFilteredDAT({ noHomebrew: true }, parentsToCandidates, 0); - await expectFilteredDAT({ onlyHomebrew: true }, parentsToCandidates, 2); + expectFilteredDAT({ noHomebrew: true }, parentsToCandidates, 0); + expectFilteredDAT({ onlyHomebrew: true }, parentsToCandidates, 2); }); - it('some games are homebrew', async () => { + it('some games are homebrew', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one (Homebrew Edition)', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), 
buildGameWithRegionLanguage('three (Homebrew)', 'USA', 'EN'), ]; - await expectFilteredDAT({ onlyHomebrew: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyHomebrew: true }, parentsToCandidates, 2); }); - it('no games are homebrew', async () => { + it('no games are homebrew', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), ]; - await expectFilteredDAT({ noHomebrew: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyHomebrew: true }, parentsToCandidates, 0); + expectFilteredDAT({ noHomebrew: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyHomebrew: true }, parentsToCandidates, 0); }); }); describe('verified', () => { - it('option is false', async () => { + it('option is false', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', [], 'EN'), buildGameWithRegionLanguage('two [!]', [], 'EN'), buildGameWithRegionLanguage('three [!]', [], 'EN'), ]; - await expectFilteredDAT({ noUnverified: false }, parentsToCandidates, 3); - await expectFilteredDAT({ onlyUnverified: false }, parentsToCandidates, 3); + expectFilteredDAT({ noUnverified: false }, parentsToCandidates, 3); + expectFilteredDAT({ onlyUnverified: false }, parentsToCandidates, 3); }); - it('all games are verified', async () => { + it('all games are verified', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', [], 'EN'), buildGameWithRegionLanguage('two', [], 'EN'), ]; - await expectFilteredDAT({ noUnverified: true }, parentsToCandidates, 0); - await expectFilteredDAT({ onlyUnverified: true }, parentsToCandidates, 2); + expectFilteredDAT({ noUnverified: true }, parentsToCandidates, 0); + expectFilteredDAT({ onlyUnverified: true }, parentsToCandidates, 2); }); - it('some games are verified', async () => { + it('some games are verified', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one [!]', [], 'EN'), 
buildGameWithRegionLanguage('two', [], 'EN'), buildGameWithRegionLanguage('three [!]', [], 'EN'), ]; - await expectFilteredDAT({ noUnverified: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyUnverified: true }, parentsToCandidates, 1); + expectFilteredDAT({ noUnverified: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyUnverified: true }, parentsToCandidates, 1); }); - it('no games are verified', async () => { + it('no games are verified', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one [!]', [], 'EN'), buildGameWithRegionLanguage('two [!]', [], 'EN'), ]; - await expectFilteredDAT({ noUnverified: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyUnverified: true }, parentsToCandidates, 0); + expectFilteredDAT({ noUnverified: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyUnverified: true }, parentsToCandidates, 0); }); }); describe('bad', () => { - it('option is false', async () => { + it('option is false', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two [b]', 'USA', 'EN'), buildGameWithRegionLanguage('three [b]', 'USA', 'EN'), ]; - await expectFilteredDAT({ noBad: false }, parentsToCandidates, 3); - await expectFilteredDAT({ onlyBad: false }, parentsToCandidates, 3); + expectFilteredDAT({ noBad: false }, parentsToCandidates, 3); + expectFilteredDAT({ onlyBad: false }, parentsToCandidates, 3); }); - it('all games are bad', async () => { + it('all games are bad', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one [b]', 'USA', 'EN'), buildGameWithRegionLanguage('two [b]', 'USA', 'EN'), ]; - await expectFilteredDAT({ noBad: true }, parentsToCandidates, 0); - await expectFilteredDAT({ onlyBad: true }, parentsToCandidates, 2); + expectFilteredDAT({ noBad: true }, parentsToCandidates, 0); + expectFilteredDAT({ onlyBad: true }, parentsToCandidates, 2); }); - it('some games are bad', async () => { + 
it('some games are bad', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one [b]', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), buildGameWithRegionLanguage('three [b]', 'USA', 'EN'), ]; - await expectFilteredDAT({ noBad: true }, parentsToCandidates, 1); - await expectFilteredDAT({ onlyBad: true }, parentsToCandidates, 2); + expectFilteredDAT({ noBad: true }, parentsToCandidates, 1); + expectFilteredDAT({ onlyBad: true }, parentsToCandidates, 2); }); - it('no games are bad', async () => { + it('no games are bad', () => { const parentsToCandidates = [ buildGameWithRegionLanguage('one', 'USA', 'EN'), buildGameWithRegionLanguage('two', 'USA', 'EN'), ]; - await expectFilteredDAT({ noBad: true }, parentsToCandidates, 2); - await expectFilteredDAT({ onlyBad: true }, parentsToCandidates, 0); + expectFilteredDAT({ noBad: true }, parentsToCandidates, 2); + expectFilteredDAT({ onlyBad: true }, parentsToCandidates, 0); }); }); }); diff --git a/test/modules/datGameInferrer.test.ts b/test/modules/datGameInferrer.test.ts index 6e4ec9882..a78726893 100644 --- a/test/modules/datGameInferrer.test.ts +++ b/test/modules/datGameInferrer.test.ts @@ -1,13 +1,21 @@ import DATGameInferrer from '../../src/modules/datGameInferrer.js'; import ROMScanner from '../../src/modules/romScanner.js'; +import FileCache from '../../src/types/files/fileCache.js'; +import FileFactory from '../../src/types/files/fileFactory.js'; import Options from '../../src/types/options.js'; import ProgressBarFake from '../console/progressBarFake.js'; test.each([ // One input path - [['test/fixtures/roms/**/*'], { roms: 28 }], + [['test/fixtures/roms/**/*'], { roms: 35 }], [['test/fixtures/roms/7z/*'], { '7z': 5 }], + [['test/fixtures/roms/chd/*'], { chd: 4 }], + [['test/fixtures/roms/cso/*'], { cso: 1 }], + [['test/fixtures/roms/discs/*'], { discs: 3 }], [['test/fixtures/roms/gz/*'], { gz: 7 }], + [['test/fixtures/roms/headered/*'], { headered: 6 }], + 
[['test/fixtures/roms/headerless/*'], { headerless: 1 }], + [['test/fixtures/roms/nkit/*'], { nkit: 1 }], [['test/fixtures/roms/rar/*'], { rar: 5 }], [['test/fixtures/roms/raw/*'], { raw: 10 }], [['test/fixtures/roms/tar/*'], { tar: 5 }], @@ -25,10 +33,14 @@ test.each([ ])('should infer DATs: %s', async (input, expected) => { // Given const options = new Options({ input }); - const romFiles = await new ROMScanner(options, new ProgressBarFake()).scan(); + const romFiles = await new ROMScanner( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan(); // When - const dats = new DATGameInferrer(options, new ProgressBarFake()).infer(romFiles); + const dats = await new DATGameInferrer(options, new ProgressBarFake()).infer(romFiles); // Then const datNameToGameCount = Object.fromEntries( diff --git a/test/modules/datMergerSplitter.test.ts b/test/modules/datMergerSplitter.test.ts index 9e4be81c7..1ccab5543 100644 --- a/test/modules/datMergerSplitter.test.ts +++ b/test/modules/datMergerSplitter.test.ts @@ -1,6 +1,7 @@ import 'jest-extended'; import DATMergerSplitter from '../../src/modules/datMergerSplitter.js'; +import Disk from '../../src/types/dats/disk.js'; import Game from '../../src/types/dats/game.js'; import Header from '../../src/types/dats/logiqx/header.js'; import LogiqxDAT from '../../src/types/dats/logiqx/logiqxDat.js'; @@ -10,13 +11,13 @@ import ROM from '../../src/types/dats/rom.js'; import Options, { MergeMode } from '../../src/types/options.js'; import ProgressBarFake from '../console/progressBarFake.js'; -it('should do nothing if no parent/clone info is present', async () => { +it('should do nothing if no parent/clone info is present', () => { // Given const options = new Options({ mergeRoms: undefined }); const dat = new LogiqxDAT(new Header(), []); // When - const result = await new DATMergerSplitter(options, new ProgressBarFake()).merge(dat); + const result = new DATMergerSplitter(options, new ProgressBarFake()).merge(dat); // 
Then the original DAT was returned expect(result).toEqual(dat); @@ -26,13 +27,13 @@ test.each( Object.keys(MergeMode) .filter((mode) => Number.isNaN(Number(mode))) .map((mode) => [mode.toLowerCase()]), -)('should do nothing if no parent/clone info is present: %s', async (mergeRoms) => { +)('should do nothing if no parent/clone info is present: %s', (mergeRoms) => { // Given const options = new Options({ mergeRoms }); const dat = new LogiqxDAT(new Header(), []); // When - const result = await new DATMergerSplitter(options, new ProgressBarFake()).merge(dat); + const result = new DATMergerSplitter(options, new ProgressBarFake()).merge(dat); // Then the original DAT was returned expect(result).toEqual(dat); @@ -627,6 +628,50 @@ describe('MAME v0.258', () => { ], }), + new Machine({ + // Game with two disks + name: '2spicy', + romOf: 'lindbios', + description: '2 Spicy', + rom: [ + new ROM({ name: '317-0491-com.bin', size: 8192, status: 'nodump' }), + new ROM({ name: '6.0.0009.bin', merge: '6.0.0009.bin', size: 1048576, crc32: '5ffdfbf8', sha1: '605bc4967b749b4e6d13fc2ebb845ba956a259a7' }), + new ROM({ name: '6.0.0010.bin', merge: '6.0.0010.bin', size: 1048576, crc32: 'ea2bf888', sha1: 'c9c5b6f0d4f4f36620939b15dd2f128a74347e37' }), + new ROM({ name: '6.0.0010a.bin', merge: '6.0.0010a.bin', size: 1048576, crc32: '10dd9b76', sha1: '1fdf1f921bc395846a7c3180fbdbc4ca287a9670' }), + new ROM({ name: 'fpr-24370b.ic6', merge: 'fpr-24370b.ic6', size: 4194304, crc32: 'c3b021a4', sha1: '1b6938a50fe0e4ae813864649eb103838c399ac0' }), + new ROM({ name: 'vid_bios.u504', merge: 'vid_bios.u504', size: 65536, crc32: 'f78d14d7', sha1: 'f129787e487984edd23bf344f2e9500c85052275' }), + ], + disk: [ + new Disk({ name: 'dvp-0027a', sha1: 'da1aacee9e32e813844f4d434981e69cc5c80682' }), + new Disk({ name: 'mda-c0004a_revb_lindyellow_v2.4.20_mvl31a_boot_2.01', merge: 'mda-c0004a_revb_lindyellow_v2.4.20_mvl31a_boot_2.01', sha1: 'e13da5f827df852e742b594729ee3f933b387410' }), + ], + deviceRef: [ + new 
DeviceRef('pentium4'), + new DeviceRef('pci_root'), + new DeviceRef('i82875p_host'), + new DeviceRef('i82875p_agp'), + new DeviceRef('geforce_7600gs'), + new DeviceRef('i82875p_overflow'), + new DeviceRef('pci_bridge'), + new DeviceRef('i82541_device'), + new DeviceRef('usb_uhci'), + new DeviceRef('usb_uhci'), + new DeviceRef('i6300esb_watchdog'), + new DeviceRef('apic'), + new DeviceRef('usb_ehci'), + new DeviceRef('pci_bridge'), + new DeviceRef('sb0400'), + new DeviceRef('lindbergh_baseboard'), + new DeviceRef('i6300esb_lpc'), + new DeviceRef('lpc_acpi'), + new DeviceRef('lpc_rpc'), + new DeviceRef('lpc_pit'), + new DeviceRef('sata'), + new DeviceRef('smbus'), + new DeviceRef('ac97'), + ], + }), + new Machine({ // Game with BIOS files but no romOf BIOS parent name: 'aes', @@ -671,6 +716,77 @@ describe('MAME v0.258', () => { ], }), + new Machine({ + // Game with one disk + name: 'area51mx', + description: 'Area 51 / Maximum Force Duo v2.0', + rom: [ + new ROM({ name: '2.0_68020_max-a51_kit_3h.3h', size: 524288, crc32: '47cbf30b', sha1: '23377bcc65c0fc330d5bc7e76e233bae043ac364' }), + new ROM({ name: '2.0_68020_max-a51_kit_3k.3k', size: 524288, crc32: '0e78f308', sha1: 'adc4c8e441eb8fe525d0a6220eb3a2a8791a7289' }), + new ROM({ name: '2.0_68020_max-a51_kit_3m.3m', size: 524288, crc32: 'd800ac17', sha1: '3d515c8608d8101ee9227116175b3c3f1fe22e0c' }), + new ROM({ name: '2.0_68020_max-a51_kit_3p.3p', size: 524288, crc32: 'a3c93684', sha1: 'f6b3357bb69900a176fd6bc6b819b2f57b7d0f59' }), + new ROM({ name: 'jagwave.rom', size: 4096, crc32: '7a25ee5b', sha1: '58117e11fd6478c521fbd3fdbe157f39567552f0' }), + ], + disk: [ + new Disk({ name: 'area51mx', sha1: '5ff10f4e87094d4449eabf3de7549564ca568c7e' }), + ], + deviceRef: [ + new DeviceRef('m68ec020'), + new DeviceRef('jaguargpu'), + new DeviceRef('jaguardsp'), + new DeviceRef('jag_blitter'), + new DeviceRef('nvram'), + new DeviceRef('watchdog'), + new DeviceRef('vt83c461'), + new DeviceRef('ata_slot'), + new 
DeviceRef('cojag_hdd'), + new DeviceRef('harddisk_image'), + new DeviceRef('ata_slot'), + new DeviceRef('screen'), + new DeviceRef('palette'), + new DeviceRef('speaker'), + new DeviceRef('speaker'), + new DeviceRef('dac_16bit_r2r_tc'), + new DeviceRef('dac_16bit_r2r_tc'), + ], + }), + new Machine({ + // Clone of a game with the same disk as its parent + name: 'a51mxr3k', + cloneOf: 'area51mx', + romOf: 'area51mx', + description: 'Area 51 / Maximum Force Duo (R3000, 2/10/98)', + rom: [ + new ROM({ name: '1.0_r3k_max-a51_kit_hh.hh', size: 524288, crc32: 'a984dab2', sha1: 'debb3bc11ff49e87a52e89a69533a1bab7db700e' }), + new ROM({ name: '1.0_r3k_max-a51_kit_hl.hl', size: 524288, crc32: '0af49d74', sha1: 'c19f26056a823fd32293e9a7b3ea868640eabf49' }), + new ROM({ name: '1.0_r3k_max-a51_kit_lh.lh', size: 524288, crc32: 'd7d94dac', sha1: '2060a74715f36a0d7f5dd0855eda48ad1f20f095' }), + new ROM({ name: '1.0_r3k_max-a51_kit_ll.ll', size: 524288, crc32: 'ece9e5ae', sha1: '7e44402726f5afa6d1670b27aa43ad13d21c4ad9' }), + new ROM({ name: 'jagwave.rom', merge: 'jagwave.rom', size: 4096, crc32: '7a25ee5b', sha1: '58117e11fd6478c521fbd3fdbe157f39567552f0' }), + ], + disk: [ + new Disk({ name: 'area51mx', merge: 'area51mx', sha1: '5ff10f4e87094d4449eabf3de7549564ca568c7e' }), + ], + deviceRef: [ + new DeviceRef('r3041'), + new DeviceRef('jaguargpu'), + new DeviceRef('jaguardsp'), + new DeviceRef('jag_blitter'), + new DeviceRef('nvram'), + new DeviceRef('watchdog'), + new DeviceRef('vt83c461'), + new DeviceRef('ata_slot'), + new DeviceRef('cojag_hdd'), + new DeviceRef('harddisk_image'), + new DeviceRef('ata_slot'), + new DeviceRef('screen'), + new DeviceRef('palette'), + new DeviceRef('speaker'), + new DeviceRef('speaker'), + new DeviceRef('dac_16bit_r2r_tc'), + new DeviceRef('dac_16bit_r2r_tc'), + ], + }), + new Machine({ // Game with no clones name: 'bbtime', @@ -1357,6 +1473,46 @@ describe('MAME v0.258', () => { new DeviceRef('palette'), ], }), + new Machine({ + name: 'lindbios', 
+ bios: 'yes', + description: 'Sega Lindbergh BIOS', + rom: [ + new ROM({ name: '6.0.0009.bin', size: 1048576, crc32: '5ffdfbf8', sha1: '605bc4967b749b4e6d13fc2ebb845ba956a259a7' }), + new ROM({ name: '6.0.0010.bin', size: 1048576, crc32: 'ea2bf888', sha1: 'c9c5b6f0d4f4f36620939b15dd2f128a74347e37' }), + new ROM({ name: '6.0.0010a.bin', size: 1048576, crc32: '10dd9b76', sha1: '1fdf1f921bc395846a7c3180fbdbc4ca287a9670' }), + new ROM({ name: 'fpr-24370b.ic6', size: 4194304, crc32: 'c3b021a4', sha1: '1b6938a50fe0e4ae813864649eb103838c399ac0' }), + new ROM({ name: 'vid_bios.u504', size: 65536, crc32: 'f78d14d7', sha1: 'f129787e487984edd23bf344f2e9500c85052275' }), + ], + disk: [ + new Disk({ name: 'mda-c0004a_revb_lindyellow_v2.4.20_mvl31a_boot_2.01', sha1: 'e13da5f827df852e742b594729ee3f933b387410' }), + ], + deviceRef: [ + new DeviceRef('ac97'), + new DeviceRef('pci_root'), + new DeviceRef('i82875p_host'), + new DeviceRef('i82875p_agp'), + new DeviceRef('geforce_7600gs'), + new DeviceRef('i82875p_overflow'), + new DeviceRef('pci_bridge'), + new DeviceRef('i82541_device'), + new DeviceRef('usb_uhci'), + new DeviceRef('usb_uhci'), + new DeviceRef('i6300esb_watchdog'), + new DeviceRef('apic'), + new DeviceRef('usb_ehci'), + new DeviceRef('pci_bridge'), + new DeviceRef('sb0400'), + new DeviceRef('lindbergh_baseboard'), + new DeviceRef('i6300esb_lpc'), + new DeviceRef('lpc_acpi'), + new DeviceRef('lpc_rpc'), + new DeviceRef('lpc_pit'), + new DeviceRef('sata'), + new DeviceRef('smbus'), + new DeviceRef('ac97'), + ], + }), // ***** Devices ***** new Machine({ name: '93c46_16', device: 'yes' }), @@ -1555,14 +1711,14 @@ describe('MAME v0.258', () => { new Machine({ name: 'z80', device: 'yes' }), ]); - it('should full-non-merged', async () => { + it('should full-non-merged', () => { // Given const options = new Options({ mergeRoms: MergeMode[MergeMode.FULLNONMERGED].toLowerCase(), }); // When - const result = await new DATMergerSplitter(options, new 
ProgressBarFake()).merge(dat); + const result = new DATMergerSplitter(options, new ProgressBarFake()).merge(dat); // Then nothing was merged expect(result.getParents()).toHaveLength(dat.getParents().length); @@ -1574,8 +1730,14 @@ describe('MAME v0.258', () => { return map; }, new Map()); + const gameNamesToDiskNames = result.getGames() + .reduce((map, game) => { + map.set(game.getName(), game.getDisks().map((disk) => disk.getName().replace(/[\\/]/g, '\\'))); + return map; + }, new Map()); + // Includes BIOS files - expect(gameNamesToRomNames.get('100lions')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('100lions')).toIncludeSameMembers([ '01.02.08_left.u3', '01.02.08_right.u2', '01.03.03a_left.u70', '01.03.03a_right.u83', '01.03.03e_left.u70', '01.03.03e_right.u83', '01.03.05_left.u70', '01.03.05_right.u83', '01.03.06_left.u70', '01.03.06_right.u83', '01.03.07_left.u70', '01.03.07_right.u83', @@ -1600,7 +1762,7 @@ describe('MAME v0.258', () => { '21012901_left.u70', '21012901_right.u83', '24010467_left.u70', '24010467_right.u83', '24013001_left.u70', '24013001_right.u83', '25012805_left.u70', '25012805_right.u83', ]); - expect(gameNamesToRomNames.get('100lionsa')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('100lionsa')).toIncludeSameMembers([ '01.02.08_left.u3', '01.02.08_right.u2', '01.03.03a_left.u70', '01.03.03a_right.u83', '01.03.03e_left.u70', '01.03.03e_right.u83', '01.03.05_left.u70', '01.03.05_right.u83', '01.03.06_left.u70', '01.03.06_right.u83', '01.03.07_left.u70', '01.03.07_right.u83', @@ -1625,53 +1787,56 @@ describe('MAME v0.258', () => { '21012901_right.u83', '24010467_left.u70', '24010467_right.u83', '24013001_left.u70', '24013001_right.u83', '25012805_left.u70', '25012805_right.u83', '30223811.u73', '30223811.u86', ]); - expect(gameNamesToRomNames.get('1942')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942')).toIncludeSameMembers([ 'sb-0.f1', 'sb-1.k6', 'sb-2.d1', 'sb-3.d2', 'sb-4.d6', 'sb-5.e8', 'sb-6.e9', 
'sb-7.e10', 'sb-8.k3', 'sb-9.m11', 'sr-01.c11', 'sr-02.f2', 'sr-08.a1', 'sr-09.a2', 'sr-10.a3', 'sr-11.a4', 'sr-12.a5', 'sr-13.a6', 'sr-14.l1', 'sr-15.l2', 'sr-16.n1', 'sr-17.n2', 'srb-03.m3', 'srb-04.m4', 'srb-05.m5', 'srb-06.m6', 'srb-07.m7', ]); - expect(gameNamesToRomNames.get('1942a')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942a')).toIncludeSameMembers([ 'sb-0.f1', 'sb-1.k6', 'sb-2.d1', 'sb-3.d2', 'sb-4.d6', 'sb-5.e8', 'sb-6.e9', 'sb-7.e10', 'sb-8.k3', 'sb-9.m11', 'sr-01.c11', 'sr-02.f2', 'sr-04.m4', 'sr-05.m5', 'sr-06.m6', 'sr-07.m7', 'sr-08.a1', 'sr-09.a2', 'sr-10.a3', 'sr-11.a4', 'sr-12.a5', 'sr-13.a6', 'sr-14.l1', 'sr-15.l2', 'sr-16.n1', 'sr-17.n2', 'sra-03.m3', ]); - expect(gameNamesToRomNames.get('1942abl')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942abl')).toIncludeSameMembers([ '1.bin', '2.bin', '3.bin', '5.bin', '7.bin', '9.bin', '11.bin', '13.bin', '14.bin', '16.bin', 'sb-0.f1', 'sb-1.k6', 'sb-2.d1', 'sb-3.d2', 'sb-4.d6', 'sb-5.e8', 'sb-6.e9', 'sb-7.e10', 'sb-8.k3', 'sb-9.m11', ]); - expect(gameNamesToRomNames.get('1942b')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942b')).toIncludeSameMembers([ 'sb-0.f1', 'sb-1.k6', 'sb-2.d1', 'sb-3.d2', 'sb-4.d6', 'sb-5.e8', 'sb-6.e9', 'sb-7.e10', 'sb-8.k3', 'sb-9.m11', 'sr-01.c11', 'sr-02.f2', 'sr-03.m3', 'sr-04.m4', 'sr-05.m5', 'sr-06.m6', 'sr-07.m7', 'sr-08.a1', 'sr-09.a2', 'sr-10.a3', 'sr-11.a4', 'sr-12.a5', 'sr-13.a6', 'sr-14.l1', 'sr-15.l2', 'sr-16.n1', 'sr-17.n2', ]); - expect(gameNamesToRomNames.get('1942h')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942h')).toIncludeSameMembers([ 'sb-0.f1', 'sb-1.k6', 'sb-2.d1', 'sb-3.d2', 'sb-4.d6', 'sb-5.e8', 'sb-6.e9', 'sb-7.e10', 'sb-8.k3', 'sb-9.m11', 'sr-01.c11', 'sr-02.f2', 'sr-08.a1', 'sr-09.a2', 'sr-10.a3', 'sr-11.a4', 'sr-12.a5', 'sr-13.a6', 'sr-14.l1', 'sr-15.l2', 'sr-16.n1', 'sr-17.n2', 'srb-06.m6', 'srb-07.m7', 'supercharger_1942_@3.m3', 'supercharger_1942_@4.m4', 'supercharger_1942_@5.m5', ]); - 
expect(gameNamesToRomNames.get('1942p')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942p')).toIncludeSameMembers([ '1.bin', '2.bin', '3.bin', '04.bin', '5.bin', '6.bin', '7.bin', '8.bin', '9.bin', '10.bin', '11.bin', '12.bin', 'ic22.bin', ]); - expect(gameNamesToRomNames.get('1942w')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942w')).toIncludeSameMembers([ 'sb-0.f1', 'sb-1.k6', 'sb-2.d1', 'sb-3.d2', 'sb-4.d6', 'sb-5.e8', 'sb-6.e9', 'sb-7.e10', 'sb-8.k3', 'sb-9.m11', 'sr-01.c11', 'sr-08.a1', 'sr-09.a2', 'sr-10.a3', 'sr-11.a4', 'sr-12.a5', 'sr-13.a6', 'sr-14.l1', 'sr-15.l2', 'sr-16.n1', 'sr-17.n2', 'sw-02.f2', 'sw-03.m3', 'sw-04.m4', 'sw-05.m5', 'sw-06.m6', 'sw-07.m7', ]); - expect(gameNamesToRomNames.get('aes')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('2spicy')).toIncludeSameMembers([ + '6.0.0009.bin', '6.0.0010.bin', '6.0.0010a.bin', 'fpr-24370b.ic6', 'vid_bios.u504', + ]); + expect(gameNamesToRomNames.get('aes')).toIncludeSameMembers([ '000-lo.lo', 'neo-epo.bin', 'neo-po.bin', 'neodebug.rom', 'uni-bios_1_3.rom', 'uni-bios_2_0.rom', 'uni-bios_2_1.rom', 'uni-bios_2_2.rom', 'uni-bios_2_3.rom', 'uni-bios_2_3o.rom', 'uni-bios_3_0.rom', 'uni-bios_3_1.rom', 'uni-bios_3_2.rom', 'uni-bios_3_3.rom', 'uni-bios_4_0.rom', ]); - expect(gameNamesToRomNames.get('bbtime')).toIncludeAllMembers(['bbtime.svg', 'hd38820a65']); - expect(gameNamesToRomNames.get('c64')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('bbtime')).toIncludeSameMembers(['bbtime.svg', 'hd38820a65']); + expect(gameNamesToRomNames.get('c64')).toIncludeSameMembers([ '325302-01.uab4', '901225-01.u5', '901226-01.u3', '901227-01.u4', '901227-02.u4', '901227-03.u4', '901229-01.uab5', '901229-02.uab5', '901229-03.uab5', '901229-05 ae.uab5', '901229-06 aa.uab5', '906114-01.u17', 'digidos.u4', 'digidos.uab5', 'dosrom12.u4', 'exos3.u4', @@ -1682,79 +1847,90 @@ describe('MAME v0.258', () => { 'turboaccess26.u4', 'turboaccess301.u4', 'turboaccess302.u4', 
'turboprocess.u4', 'turboprocessus.u4', 'turborom2.u4', 'turborom.u4', ]); - expect(gameNamesToRomNames.get('ddonpach')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('ddonpach')).toIncludeSameMembers([ 'b1.u27', 'b2.u26', 'eeprom-ddonpach.bin', 'u6.bin', 'u7.bin', 'u50.bin', 'u51.bin', 'u52.bin', 'u53.bin', 'u60.bin', 'u61.bin', 'u62.bin', ]); - expect(gameNamesToRomNames.get('ddonpacha')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('ddonpacha')).toIncludeSameMembers([ 'arrange_u26.bin', 'arrange_u27.bin', 'arrange_u51.bin', 'arrange_u62.bin', 'eeprom-ddonpach.bin', 'u6.bin', 'u7.bin', 'u50.bin', 'u52.bin', 'u53.bin', 'u60.bin', 'u61.bin', ]); - expect(gameNamesToRomNames.get('ddonpachj')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('ddonpachj')).toIncludeSameMembers([ 'eeprom-ddonpach.bin', 'u6.bin', 'u7.bin', 'u26.bin', 'u27.bin', 'u50.bin', 'u51.bin', 'u52.bin', 'u53.bin', 'u60.bin', 'u61.bin', 'u62.bin', ]); + // Includes device ROMs - expect(gameNamesToRomNames.get('galaga')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('area51mx')).toIncludeSameMembers([ + '2.0_68020_max-a51_kit_3h.3h', '2.0_68020_max-a51_kit_3k.3k', '2.0_68020_max-a51_kit_3m.3m', + '2.0_68020_max-a51_kit_3p.3p', 'jagwave.rom', + ]); + expect(gameNamesToDiskNames.get('area51mx')).toIncludeSameMembers(['area51mx']); + expect(gameNamesToRomNames.get('a51mxr3k')).toIncludeSameMembers([ + '1.0_r3k_max-a51_kit_hh.hh', '1.0_r3k_max-a51_kit_hl.hl', '1.0_r3k_max-a51_kit_lh.lh', + '1.0_r3k_max-a51_kit_ll.ll', 'jagwave.rom', + ]); + expect(gameNamesToDiskNames.get('a51mxr3k')).toIncludeSameMembers(['area51mx']); + expect(gameNamesToRomNames.get('galaga')).toIncludeSameMembers([ '51xx.bin', '54xx.bin', 'gg1_1b.3p', 'gg1_2b.3m', 'gg1_3.2m', 'gg1_4b.2l', 'gg1_5b.3f', 'gg1_7b.2c', 'gg1_9.4l', 'gg1_10.4f', 'gg1_11.4d', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('galagamf')).toIncludeAllMembers([ + 
expect(gameNamesToRomNames.get('galagamf')).toIncludeSameMembers([ '51xx.bin', '54xx.bin', '2600j.bin', '2700k.bin', '2800l.bin', '3200a.bin', '3300b.bin', '3400c.bin', '3500d.bin', '3600fast.bin', '3700g.bin', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('galagamk')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('galagamk')).toIncludeSameMembers([ '51xx.bin', '54xx.bin', '3400c.bin', 'gg1-5.3f', 'gg1-7b.2c', 'gg1-9.4l', 'gg1-10.4f', 'gg1-11.4d', 'mk2-1', 'mk2-2', 'mk2-4', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('galagamw')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('galagamw')).toIncludeSameMembers([ '51xx.bin', '54xx.bin', '2600j.bin', '2700k.bin', '2800l.bin', '3200a.bin', '3300b.bin', '3400c.bin', '3500d.bin', '3600e.bin', '3700g.bin', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('galagao')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('galagao')).toIncludeSameMembers([ '51xx.bin', '54xx.bin', 'gg1-1.3p', 'gg1-2.3m', 'gg1-3.2m', 'gg1-4.2l', 'gg1-5.3f', 'gg1-7.2c', 'gg1-9.4l', 'gg1-10.4f', 'gg1-11.4d', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('gallag')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('gallag')).toIncludeSameMembers([ '51xx.bin', 'gallag.1', 'gallag.2', 'gallag.3', 'gallag.4', 'gallag.5', 'gallag.6', 'gallag.7', 'gallag.8', 'gallag.9', 'gallag.a', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('gatsbee')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('gatsbee')).toIncludeSameMembers([ '1.4b', '2.4c', '3.4d', '4.4e', '8.5r', '9.6a', '10.7a', '51xx.bin', '54xx.bin', 'gallag.6', 'gg1-5.3f', 'gg1-7.2c', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - 
expect(gameNamesToRomNames.get('nebulbee')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('nebulbee')).toIncludeSameMembers([ '1c.bin', '1d.bin', '2n.bin', '5c.bin', '51xx.bin', 'gg1-5', 'gg1-7', 'gg1_3.2m', 'gg1_9.4l', 'gg1_10.4f', 'gg1_11.4d', 'nebulbee.01', 'nebulbee.02', 'nebulbee.04', 'nebulbee.07', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('liblrabl')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('liblrabl')).toIncludeSameMembers([ '2c.rom', '5b.rom', '5c.rom', '5p.rom', '8c.rom', '9t.rom', '10c.rom', 'lr1-1.1t', 'lr1-2.1s', 'lr1-3.1r', 'lr1-4.3d', 'lr1-5.5l', 'lr1-6.2p', ]); // No change to BIOS or devices - expect(result.getGames().filter((game) => game.isBios())).toHaveLength(2); + expect(result.getGames().filter((game) => game.isBios())).toHaveLength(3); expect(result.getGames().filter((game) => game.isDevice())).toHaveLength(65); expect(gameNamesToRomNames.get('aristmk6')).toHaveLength(96); expect(gameNamesToRomNames.get('neogeo')).toHaveLength(34); }); - it('should non-merged', async () => { + it('should non-merged', () => { // Given const options = new Options({ mergeRoms: MergeMode[MergeMode.NONMERGED].toLowerCase(), }); // When - const result = await new DATMergerSplitter(options, new ProgressBarFake()).merge(dat); + const result = new DATMergerSplitter(options, new ProgressBarFake()).merge(dat); // Then nothing was merged expect(result.getParents()).toHaveLength(dat.getParents().length); @@ -1766,56 +1942,73 @@ describe('MAME v0.258', () => { return map; }, new Map()); + const gameNamesToDiskNames = result.getGames() + .reduce((map, game) => { + map.set(game.getName(), game.getDisks().map((disk) => disk.getName().replace(/[\\/]/g, '\\'))); + return map; + }, new Map()); + // Excludes device files - expect(gameNamesToRomNames.get('100lions')).toIncludeAllMembers(['10219211.u73', '10219211.u86']); - expect(gameNamesToRomNames.get('100lionsa')).toIncludeAllMembers(['30223811.u73', '30223811.u86']); - 
expect(gameNamesToRomNames.get('1942')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('100lions')).toIncludeSameMembers(['10219211.u73', '10219211.u86']); + expect(gameNamesToRomNames.get('100lionsa')).toIncludeSameMembers(['30223811.u73', '30223811.u86']); + expect(gameNamesToRomNames.get('1942')).toIncludeSameMembers([ 'sb-0.f1', 'sb-1.k6', 'sb-2.d1', 'sb-3.d2', 'sb-4.d6', 'sb-5.e8', 'sb-6.e9', 'sb-7.e10', 'sb-8.k3', 'sb-9.m11', 'sr-01.c11', 'sr-02.f2', 'sr-08.a1', 'sr-09.a2', 'sr-10.a3', 'sr-11.a4', 'sr-12.a5', 'sr-13.a6', 'sr-14.l1', 'sr-15.l2', 'sr-16.n1', 'sr-17.n2', 'srb-03.m3', 'srb-04.m4', 'srb-05.m5', 'srb-06.m6', 'srb-07.m7', ]); - expect(gameNamesToRomNames.get('1942a')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942a')).toIncludeSameMembers([ 'sb-0.f1', 'sb-1.k6', 'sb-2.d1', 'sb-3.d2', 'sb-4.d6', 'sb-5.e8', 'sb-6.e9', 'sb-7.e10', 'sb-8.k3', 'sb-9.m11', 'sr-01.c11', 'sr-02.f2', 'sr-04.m4', 'sr-05.m5', 'sr-06.m6', 'sr-07.m7', 'sr-08.a1', 'sr-09.a2', 'sr-10.a3', 'sr-11.a4', 'sr-12.a5', 'sr-13.a6', 'sr-14.l1', 'sr-15.l2', 'sr-16.n1', 'sr-17.n2', 'sra-03.m3', ]); - expect(gameNamesToRomNames.get('1942abl')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942abl')).toIncludeSameMembers([ '1.bin', '2.bin', '3.bin', '5.bin', '7.bin', '9.bin', '11.bin', '13.bin', '14.bin', '16.bin', 'sb-0.f1', 'sb-1.k6', 'sb-2.d1', 'sb-3.d2', 'sb-4.d6', 'sb-5.e8', 'sb-6.e9', 'sb-7.e10', 'sb-8.k3', 'sb-9.m11', ]); - expect(gameNamesToRomNames.get('1942b')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942b')).toIncludeSameMembers([ 'sb-0.f1', 'sb-1.k6', 'sb-2.d1', 'sb-3.d2', 'sb-4.d6', 'sb-5.e8', 'sb-6.e9', 'sb-7.e10', 'sb-8.k3', 'sb-9.m11', 'sr-01.c11', 'sr-02.f2', 'sr-03.m3', 'sr-04.m4', 'sr-05.m5', 'sr-06.m6', 'sr-07.m7', 'sr-08.a1', 'sr-09.a2', 'sr-10.a3', 'sr-11.a4', 'sr-12.a5', 'sr-13.a6', 'sr-14.l1', 'sr-15.l2', 'sr-16.n1', 'sr-17.n2', ]); - expect(gameNamesToRomNames.get('1942h')).toIncludeAllMembers([ + 
expect(gameNamesToRomNames.get('1942h')).toIncludeSameMembers([ 'sb-0.f1', 'sb-1.k6', 'sb-2.d1', 'sb-3.d2', 'sb-4.d6', 'sb-5.e8', 'sb-6.e9', 'sb-7.e10', 'sb-8.k3', 'sb-9.m11', 'sr-01.c11', 'sr-02.f2', 'sr-08.a1', 'sr-09.a2', 'sr-10.a3', 'sr-11.a4', 'sr-12.a5', 'sr-13.a6', 'sr-14.l1', 'sr-15.l2', 'sr-16.n1', 'sr-17.n2', 'srb-06.m6', 'srb-07.m7', 'supercharger_1942_@3.m3', 'supercharger_1942_@4.m4', 'supercharger_1942_@5.m5', ]); - expect(gameNamesToRomNames.get('1942p')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942p')).toIncludeSameMembers([ '1.bin', '2.bin', '3.bin', '04.bin', '5.bin', '6.bin', '7.bin', '8.bin', '9.bin', '10.bin', '11.bin', '12.bin', 'ic22.bin', ]); - expect(gameNamesToRomNames.get('1942w')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942w')).toIncludeSameMembers([ 'sb-0.f1', 'sb-1.k6', 'sb-2.d1', 'sb-3.d2', 'sb-4.d6', 'sb-5.e8', 'sb-6.e9', 'sb-7.e10', 'sb-8.k3', 'sb-9.m11', 'sr-01.c11', 'sr-08.a1', 'sr-09.a2', 'sr-10.a3', 'sr-11.a4', 'sr-12.a5', 'sr-13.a6', 'sr-14.l1', 'sr-15.l2', 'sr-16.n1', 'sr-17.n2', 'sw-02.f2', 'sw-03.m3', 'sw-04.m4', 'sw-05.m5', 'sw-06.m6', 'sw-07.m7', ]); - expect(gameNamesToRomNames.get('aes')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('2spicy')).toIncludeSameMembers([]); + expect(gameNamesToRomNames.get('aes')).toIncludeSameMembers([ '000-lo.lo', 'neo-epo.bin', 'neo-po.bin', 'neodebug.rom', 'uni-bios_1_3.rom', 'uni-bios_2_0.rom', 'uni-bios_2_1.rom', 'uni-bios_2_2.rom', 'uni-bios_2_3.rom', 'uni-bios_2_3o.rom', 'uni-bios_3_0.rom', 'uni-bios_3_1.rom', 'uni-bios_3_2.rom', 'uni-bios_3_3.rom', 'uni-bios_4_0.rom', ]); - expect(gameNamesToRomNames.get('bbtime')).toIncludeAllMembers(['bbtime.svg', 'hd38820a65']); - expect(gameNamesToRomNames.get('c64')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('area51mx')).toIncludeSameMembers([ + '2.0_68020_max-a51_kit_3h.3h', '2.0_68020_max-a51_kit_3k.3k', '2.0_68020_max-a51_kit_3m.3m', + '2.0_68020_max-a51_kit_3p.3p', 'jagwave.rom', + ]); 
+ expect(gameNamesToDiskNames.get('area51mx')).toIncludeSameMembers(['area51mx']); + expect(gameNamesToRomNames.get('a51mxr3k')).toIncludeSameMembers([ + '1.0_r3k_max-a51_kit_hh.hh', '1.0_r3k_max-a51_kit_hl.hl', '1.0_r3k_max-a51_kit_lh.lh', + '1.0_r3k_max-a51_kit_ll.ll', 'jagwave.rom', + ]); + expect(gameNamesToDiskNames.get('a51mxr3k')).toIncludeSameMembers(['area51mx']); + expect(gameNamesToRomNames.get('bbtime')).toIncludeSameMembers(['bbtime.svg', 'hd38820a65']); + expect(gameNamesToRomNames.get('c64')).toIncludeSameMembers([ '901225-01.u5', '901226-01.u3', '901227-01.u4', '901227-02.u4', '901227-03.u4', '906114-01.u17', 'digidos.u4', 'dosrom12.u4', 'exos3.u4', 'exos4.u4', 'jiffydos c64.u4', 'kernal-10-mager.u4', 'kernal-20-1.u4', 'kernal-20-1_au.u4', 'kernal-20-2.u4', 'kernal-20-3.u4', 'kernal-30.u4', @@ -1824,71 +2017,71 @@ describe('MAME v0.258', () => { 'turboaccess301.u4', 'turboaccess302.u4', 'turboprocess.u4', 'turboprocessus.u4', 'turborom2.u4', 'turborom.u4', ]); - expect(gameNamesToRomNames.get('ddonpach')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('ddonpach')).toIncludeSameMembers([ 'b1.u27', 'b2.u26', 'eeprom-ddonpach.bin', 'u6.bin', 'u7.bin', 'u50.bin', 'u51.bin', 'u52.bin', 'u53.bin', 'u60.bin', 'u61.bin', 'u62.bin', ]); - expect(gameNamesToRomNames.get('ddonpacha')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('ddonpacha')).toIncludeSameMembers([ 'arrange_u26.bin', 'arrange_u27.bin', 'arrange_u51.bin', 'arrange_u62.bin', 'eeprom-ddonpach.bin', 'u6.bin', 'u7.bin', 'u50.bin', 'u52.bin', 'u53.bin', 'u60.bin', 'u61.bin', ]); - expect(gameNamesToRomNames.get('ddonpachj')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('ddonpachj')).toIncludeSameMembers([ 'eeprom-ddonpach.bin', 'u6.bin', 'u7.bin', 'u26.bin', 'u27.bin', 'u50.bin', 'u51.bin', 'u52.bin', 'u53.bin', 'u60.bin', 'u61.bin', 'u62.bin', ]); - expect(gameNamesToRomNames.get('galaga')).toIncludeAllMembers([ + 
expect(gameNamesToRomNames.get('galaga')).toIncludeSameMembers([ 'gg1_1b.3p', 'gg1_2b.3m', 'gg1_3.2m', 'gg1_4b.2l', 'gg1_5b.3f', 'gg1_7b.2c', 'gg1_9.4l', 'gg1_10.4f', 'gg1_11.4d', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('galagamf')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('galagamf')).toIncludeSameMembers([ '2600j.bin', '2700k.bin', '2800l.bin', '3200a.bin', '3300b.bin', '3400c.bin', '3500d.bin', '3600fast.bin', '3700g.bin', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('galagamk')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('galagamk')).toIncludeSameMembers([ '3400c.bin', 'gg1-5.3f', 'gg1-7b.2c', 'gg1-9.4l', 'gg1-10.4f', 'gg1-11.4d', 'mk2-1', 'mk2-2', 'mk2-4', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('galagamw')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('galagamw')).toIncludeSameMembers([ '2600j.bin', '2700k.bin', '2800l.bin', '3200a.bin', '3300b.bin', '3400c.bin', '3500d.bin', '3600e.bin', '3700g.bin', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('galagao')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('galagao')).toIncludeSameMembers([ 'gg1-1.3p', 'gg1-2.3m', 'gg1-3.2m', 'gg1-4.2l', 'gg1-5.3f', 'gg1-7.2c', 'gg1-9.4l', 'gg1-10.4f', 'gg1-11.4d', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('gallag')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('gallag')).toIncludeSameMembers([ 'gallag.1', 'gallag.2', 'gallag.3', 'gallag.4', 'gallag.5', 'gallag.6', 'gallag.7', 'gallag.8', 'gallag.9', 'gallag.a', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('gatsbee')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('gatsbee')).toIncludeSameMembers([ '1.4b', '2.4c', 
'3.4d', '4.4e', '8.5r', '9.6a', '10.7a', 'gallag.6', 'gg1-5.3f', 'gg1-7.2c', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('nebulbee')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('nebulbee')).toIncludeSameMembers([ '1c.bin', '1d.bin', '2n.bin', '5c.bin', 'gg1-5', 'gg1-7', 'gg1_3.2m', 'gg1_9.4l', 'gg1_10.4f', 'gg1_11.4d', 'nebulbee.01', 'nebulbee.02', 'nebulbee.04', 'nebulbee.07', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('liblrabl')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('liblrabl')).toIncludeSameMembers([ '2c.rom', '5b.rom', '5c.rom', '5p.rom', '8c.rom', '9t.rom', '10c.rom', 'lr1-1.1t', 'lr1-2.1s', 'lr1-3.1r', 'lr1-4.3d', 'lr1-5.5l', 'lr1-6.2p', ]); // No change to BIOS or devices - expect(result.getGames().filter((game) => game.isBios())).toHaveLength(2); + expect(result.getGames().filter((game) => game.isBios())).toHaveLength(3); expect(result.getGames().filter((game) => game.isDevice())).toHaveLength(65); expect(gameNamesToRomNames.get('aristmk6')).toHaveLength(96); expect(gameNamesToRomNames.get('neogeo')).toHaveLength(34); }); - it('should split', async () => { + it('should split', () => { // Given const options = new Options({ mergeRoms: MergeMode[MergeMode.SPLIT].toLowerCase(), }); // When - const result = await new DATMergerSplitter(options, new ProgressBarFake()).merge(dat); + const result = new DATMergerSplitter(options, new ProgressBarFake()).merge(dat); // Then nothing was merged expect(result.getParents()).toHaveLength(dat.getParents().length); @@ -1900,47 +2093,67 @@ describe('MAME v0.258', () => { return map; }, new Map()); + const gameNamesToDiskNames = result.getGames() + .reduce((map, game) => { + map.set(game.getName(), game.getDisks().map((disk) => disk.getName().replace(/[\\/]/g, '\\'))); + return map; + }, new Map()); + // No change - expect(gameNamesToRomNames.get('aes')).toIncludeAllMembers([ + 
expect(gameNamesToRomNames.get('aes')).toIncludeSameMembers([ '000-lo.lo', 'neo-epo.bin', 'neo-po.bin', 'neodebug.rom', 'uni-bios_1_3.rom', 'uni-bios_2_0.rom', 'uni-bios_2_1.rom', 'uni-bios_2_2.rom', 'uni-bios_2_3.rom', 'uni-bios_2_3o.rom', 'uni-bios_3_0.rom', 'uni-bios_3_1.rom', 'uni-bios_3_2.rom', 'uni-bios_3_3.rom', 'uni-bios_4_0.rom', ]); - expect(gameNamesToRomNames.get('bbtime')).toIncludeAllMembers(['bbtime.svg', 'hd38820a65']); - expect(gameNamesToRomNames.get('liblrabl')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('area51mx')).toIncludeSameMembers([ + '2.0_68020_max-a51_kit_3h.3h', '2.0_68020_max-a51_kit_3k.3k', '2.0_68020_max-a51_kit_3m.3m', + '2.0_68020_max-a51_kit_3p.3p', 'jagwave.rom', + ]); + expect(gameNamesToDiskNames.get('area51mx')).toIncludeSameMembers(['area51mx']); + expect(gameNamesToRomNames.get('bbtime')).toIncludeSameMembers(['bbtime.svg', 'hd38820a65']); + expect(gameNamesToRomNames.get('liblrabl')).toIncludeSameMembers([ '2c.rom', '5b.rom', '5c.rom', '5p.rom', '8c.rom', '9t.rom', '10c.rom', 'lr1-1.1t', 'lr1-2.1s', 'lr1-3.1r', 'lr1-4.3d', 'lr1-5.5l', 'lr1-6.2p', ]); + // Clones exclude parent ROMs - expect(gameNamesToRomNames.get('100lions')).toIncludeAllMembers(['10219211.u73', '10219211.u86']); - expect(gameNamesToRomNames.get('100lionsa')).toIncludeAllMembers(['30223811.u73', '30223811.u86']); - expect(gameNamesToRomNames.get('1942')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('100lions')).toIncludeSameMembers(['10219211.u73', '10219211.u86']); + expect(gameNamesToRomNames.get('100lionsa')).toIncludeSameMembers(['30223811.u73', '30223811.u86']); + expect(gameNamesToRomNames.get('1942')).toIncludeSameMembers([ 'sb-0.f1', 'sb-1.k6', 'sb-2.d1', 'sb-3.d2', 'sb-4.d6', 'sb-5.e8', 'sb-6.e9', 'sb-7.e10', 'sb-8.k3', 'sb-9.m11', 'sr-01.c11', 'sr-02.f2', 'sr-08.a1', 'sr-09.a2', 'sr-10.a3', 'sr-11.a4', 'sr-12.a5', 'sr-13.a6', 'sr-14.l1', 'sr-15.l2', 'sr-16.n1', 'sr-17.n2', 'srb-03.m3', 'srb-04.m4', 'srb-05.m5', 'srb-06.m6', 
'srb-07.m7', ]); - expect(gameNamesToRomNames.get('1942a')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942a')).toIncludeSameMembers([ 'sr-04.m4', 'sr-05.m5', 'sr-06.m6', 'sr-07.m7', 'sra-03.m3', ]); - expect(gameNamesToRomNames.get('1942abl')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942abl')).toIncludeSameMembers([ '3.bin', '5.bin', '7.bin', '9.bin', '11.bin', '13.bin', '14.bin', '16.bin', ]); - expect(gameNamesToRomNames.get('1942b')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942b')).toIncludeSameMembers([ 'sr-03.m3', 'sr-04.m4', 'sr-05.m5', 'sr-06.m6', 'sr-07.m7', ]); - expect(gameNamesToRomNames.get('1942h')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942h')).toIncludeSameMembers([ 'supercharger_1942_@3.m3', 'supercharger_1942_@4.m4', 'supercharger_1942_@5.m5', ]); - expect(gameNamesToRomNames.get('1942p')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942p')).toIncludeSameMembers([ '1.bin', '2.bin', '3.bin', '04.bin', '5.bin', '6.bin', '7.bin', '9.bin', '10.bin', '11.bin', '12.bin', ]); - expect(gameNamesToRomNames.get('1942w')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942w')).toIncludeSameMembers([ 'sw-02.f2', 'sw-03.m3', 'sw-04.m4', 'sw-05.m5', 'sw-07.m7', ]); - expect(gameNamesToRomNames.get('c64')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('2spicy')).toIncludeSameMembers([ + // It exactly matches its BIOS + ]); + expect(gameNamesToRomNames.get('a51mxr3k')).toIncludeSameMembers([ + '1.0_r3k_max-a51_kit_hh.hh', '1.0_r3k_max-a51_kit_hl.hl', '1.0_r3k_max-a51_kit_lh.lh', + '1.0_r3k_max-a51_kit_ll.ll', + ]); + expect(gameNamesToDiskNames.get('a51mxr3k')).toIncludeSameMembers([]); + expect(gameNamesToRomNames.get('c64')).toIncludeSameMembers([ '901225-01.u5', '901226-01.u3', '901227-01.u4', '901227-02.u4', '901227-03.u4', '906114-01.u17', 'digidos.u4', 'dosrom12.u4', 'exos3.u4', 'exos4.u4', 'jiffydos c64.u4', 'kernal-10-mager.u4', 'kernal-20-1.u4', 
'kernal-20-1_au.u4', 'kernal-20-2.u4', 'kernal-20-3.u4', 'kernal-30.u4', @@ -1949,59 +2162,59 @@ describe('MAME v0.258', () => { 'turboaccess301.u4', 'turboaccess302.u4', 'turboprocess.u4', 'turboprocessus.u4', 'turborom2.u4', 'turborom.u4', ]); - expect(gameNamesToRomNames.get('ddonpach')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('ddonpach')).toIncludeSameMembers([ 'b1.u27', 'b2.u26', 'eeprom-ddonpach.bin', 'u6.bin', 'u7.bin', 'u50.bin', 'u51.bin', 'u52.bin', 'u53.bin', 'u60.bin', 'u61.bin', 'u62.bin', ]); - expect(gameNamesToRomNames.get('ddonpacha')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('ddonpacha')).toIncludeSameMembers([ 'arrange_u26.bin', 'arrange_u27.bin', 'arrange_u51.bin', 'arrange_u62.bin', 'eeprom-ddonpach.bin', ]); - expect(gameNamesToRomNames.get('ddonpachj')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('ddonpachj')).toIncludeSameMembers([ 'u26.bin', 'u27.bin', ]); - expect(gameNamesToRomNames.get('galaga')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('galaga')).toIncludeSameMembers([ 'gg1_1b.3p', 'gg1_2b.3m', 'gg1_3.2m', 'gg1_4b.2l', 'gg1_5b.3f', 'gg1_7b.2c', 'gg1_9.4l', 'gg1_10.4f', 'gg1_11.4d', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('galagamf')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('galagamf')).toIncludeSameMembers([ '3200a.bin', '3300b.bin', '3400c.bin', '3500d.bin', '3600fast.bin', '3700g.bin', ]); - expect(gameNamesToRomNames.get('galagamk')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('galagamk')).toIncludeSameMembers([ '3400c.bin', 'gg1-5.3f', 'mk2-1', 'mk2-2', 'mk2-4', ]); - expect(gameNamesToRomNames.get('galagamw')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('galagamw')).toIncludeSameMembers([ '3200a.bin', '3300b.bin', '3400c.bin', '3500d.bin', '3600e.bin', '3700g.bin', ]); - expect(gameNamesToRomNames.get('galagao')).toIncludeAllMembers([ + 
expect(gameNamesToRomNames.get('galagao')).toIncludeSameMembers([ 'gg1-1.3p', 'gg1-2.3m', 'gg1-4.2l', 'gg1-5.3f', 'gg1-7.2c', ]); - expect(gameNamesToRomNames.get('gallag')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('gallag')).toIncludeSameMembers([ 'gallag.1', 'gallag.2', 'gallag.4', 'gallag.5', 'gallag.6', 'gallag.7', 'gallag.8', ]); - expect(gameNamesToRomNames.get('gatsbee')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('gatsbee')).toIncludeSameMembers([ '1.4b', '2.4c', '3.4d', '4.4e', '8.5r', '9.6a', '10.7a', 'gallag.6', 'gg1-5.3f', 'gg1-7.2c', ]); - expect(gameNamesToRomNames.get('nebulbee')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('nebulbee')).toIncludeSameMembers([ '1c.bin', '1d.bin', '2n.bin', '5c.bin', 'gg1-5', 'gg1-7', 'nebulbee.01', 'nebulbee.02', 'nebulbee.04', 'nebulbee.07', ]); // No change to BIOS or devices - expect(result.getGames().filter((game) => game.isBios())).toHaveLength(2); + expect(result.getGames().filter((game) => game.isBios())).toHaveLength(3); expect(result.getGames().filter((game) => game.isDevice())).toHaveLength(65); expect(gameNamesToRomNames.get('aristmk6')).toHaveLength(96); expect(gameNamesToRomNames.get('neogeo')).toHaveLength(34); }); - it('should merged', async () => { + it('should merged', () => { // Given const options = new Options({ mergeRoms: MergeMode[MergeMode.MERGED].toLowerCase(), }); // When - const result = await new DATMergerSplitter(options, new ProgressBarFake()).merge(dat); + const result = new DATMergerSplitter(options, new ProgressBarFake()).merge(dat); // Then clones were merged expect(result.getParents()).toHaveLength(dat.getParents().length); @@ -2013,15 +2226,21 @@ describe('MAME v0.258', () => { return map; }, new Map()); + const gameNamesToDiskNames = result.getGames() + .reduce((map, game) => { + map.set(game.getName(), game.getDisks().map((disk) => disk.getName().replace(/[\\/]/g, '\\'))); + return map; + }, new Map()); + // No change from regular non-merged 
(because there are no clones) - expect(gameNamesToRomNames.get('aes')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('aes')).toIncludeSameMembers([ '000-lo.lo', 'neo-epo.bin', 'neo-po.bin', 'neodebug.rom', 'uni-bios_1_3.rom', 'uni-bios_2_0.rom', 'uni-bios_2_1.rom', 'uni-bios_2_2.rom', 'uni-bios_2_3.rom', 'uni-bios_2_3o.rom', 'uni-bios_3_0.rom', 'uni-bios_3_1.rom', 'uni-bios_3_2.rom', 'uni-bios_3_3.rom', 'uni-bios_4_0.rom', ]); - expect(gameNamesToRomNames.get('bbtime')).toIncludeAllMembers(['bbtime.svg', 'hd38820a65']); - expect(gameNamesToRomNames.get('c64')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('bbtime')).toIncludeSameMembers(['bbtime.svg', 'hd38820a65']); + expect(gameNamesToRomNames.get('c64')).toIncludeSameMembers([ // NOTE(cemmer): excludes clones '901225-01.u5', '901226-01.u3', '901227-01.u4', '901227-02.u4', '901227-03.u4', '906114-01.u17', 'digidos.u4', 'dosrom12.u4', 'exos3.u4', 'exos4.u4', 'jiffydos c64.u4', 'kernal-10-mager.u4', @@ -2031,17 +2250,18 @@ describe('MAME v0.258', () => { 'turboaccess301.u4', 'turboaccess302.u4', 'turboprocess.u4', 'turboprocessus.u4', 'turborom2.u4', 'turborom.u4', ]); - expect(gameNamesToRomNames.get('liblrabl')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('liblrabl')).toIncludeSameMembers([ '2c.rom', '5b.rom', '5c.rom', '5p.rom', '8c.rom', '9t.rom', '10c.rom', 'lr1-1.1t', 'lr1-2.1s', 'lr1-3.1r', 'lr1-4.3d', 'lr1-5.5l', 'lr1-6.2p', ]); + // Clones are merged in - expect(gameNamesToRomNames.get('100lions')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('100lions')).toIncludeSameMembers([ '100lionsa\\30223811.u73', '100lionsa\\30223811.u86', '10219211.u73', '10219211.u86', ]); expect(gameNamesToRomNames.has('100lionsa')).toEqual(false); - expect(gameNamesToRomNames.get('1942')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('1942')).toIncludeSameMembers([ '1942a\\sr-04.m4', '1942a\\sr-05.m5', '1942a\\sr-06.m6', '1942a\\sr-07.m7', '1942a\\sra-03.m3', '1942abl\\3.bin', 
'1942abl\\7.bin', '1942abl\\9.bin', '1942abl\\11.bin', '1942abl\\13.bin', '1942abl\\14.bin', '1942abl\\16.bin', '1942b\\sr-03.m3', @@ -2059,7 +2279,19 @@ describe('MAME v0.258', () => { expect(gameNamesToRomNames.has('1942h')).toEqual(false); expect(gameNamesToRomNames.has('1942p')).toEqual(false); expect(gameNamesToRomNames.has('1942w')).toEqual(false); - expect(gameNamesToRomNames.get('galaga')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('2spicy')).toIncludeSameMembers([ + // It exactly matches its BIOS + ]); + expect(gameNamesToRomNames.get('area51mx')).toIncludeSameMembers([ + 'a51mxr3k\\1.0_r3k_max-a51_kit_hh.hh', 'a51mxr3k\\1.0_r3k_max-a51_kit_hl.hl', + 'a51mxr3k\\1.0_r3k_max-a51_kit_lh.lh', 'a51mxr3k\\1.0_r3k_max-a51_kit_ll.ll', + '2.0_68020_max-a51_kit_3h.3h', '2.0_68020_max-a51_kit_3k.3k', '2.0_68020_max-a51_kit_3m.3m', + '2.0_68020_max-a51_kit_3p.3p', 'jagwave.rom', + ]); + expect(gameNamesToDiskNames.get('area51mx')).toIncludeSameMembers(['area51mx']); + expect(gameNamesToRomNames.has('a51mxr3k')).toEqual(false); + expect(gameNamesToDiskNames.has('a51mxr3k')).toEqual(false); + expect(gameNamesToRomNames.get('galaga')).toIncludeSameMembers([ 'galagamf\\3200a.bin', 'galagamf\\3300b.bin', 'galagamf\\3400c.bin', 'galagamf\\3500d.bin', 'galagamf\\3600fast.bin', 'galagamf\\3700g.bin', 'galagamk\\gg1-5.3f', 'galagamk\\mk2-1', 'galagamk\\mk2-2', 'galagamk\\mk2-4', 'galagamw\\3600e.bin', 'galagao\\gg1-1.3p', 'galagao\\gg1-2.3m', 'galagao\\gg1-4.2l', 'galagao\\gg1-7.2c', @@ -2069,7 +2301,7 @@ describe('MAME v0.258', () => { 'gg1_1b.3p', 'gg1_2b.3m', 'gg1_3.2m', 'gg1_4b.2l', 'gg1_5b.3f', 'gg1_7b.2c', 'gg1_9.4l', 'gg1_10.4f', 'gg1_11.4d', 'prom-1.1d', 'prom-2.5c', 'prom-3.1c', 'prom-4.2n', 'prom-5.5n', ]); - expect(gameNamesToRomNames.get('ddonpach')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('ddonpach')).toIncludeSameMembers([ 'ddonpacha\\arrange_u26.bin', 'ddonpacha\\arrange_u27.bin', 'ddonpacha\\arrange_u51.bin', 
'ddonpacha\\arrange_u62.bin', 'ddonpacha\\eeprom-ddonpach.bin', 'ddonpachj\\u26.bin', 'ddonpachj\\u27.bin', 'b1.u27', 'b2.u26', 'eeprom-ddonpach.bin', 'u6.bin', 'u7.bin', 'u50.bin', @@ -2086,7 +2318,7 @@ describe('MAME v0.258', () => { expect(gameNamesToRomNames.has('nebulbee')).toEqual(false); // No change to BIOS or devices - expect(result.getGames().filter((game) => game.isBios())).toHaveLength(2); + expect(result.getGames().filter((game) => game.isBios())).toHaveLength(3); expect(result.getGames().filter((game) => game.isDevice())).toHaveLength(65); expect(gameNamesToRomNames.get('aristmk6')).toHaveLength(96); expect(gameNamesToRomNames.get('neogeo')).toHaveLength(34); @@ -2252,14 +2484,14 @@ describe('FinalBurn Neo Neo Geo e544671', () => { }), ]); - it('should split', async () => { + it('should split', () => { // Given const options = new Options({ mergeRoms: MergeMode[MergeMode.SPLIT].toLowerCase(), }); // When - const result = await new DATMergerSplitter(options, new ProgressBarFake()).merge(dat); + const result = new DATMergerSplitter(options, new ProgressBarFake()).merge(dat); // Then nothing was merged expect(result.getParents()).toHaveLength(dat.getParents().length); @@ -2272,7 +2504,7 @@ describe('FinalBurn Neo Neo Geo e544671', () => { }, new Map()); // No change - expect(gameNamesToRomNames.get('neogeo')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('neogeo')).toIncludeSameMembers([ 'sp-s3.sp1', 'sp-s2.sp1', 'sp-s.sp1', 'sp-u2.sp1', 'sp1-u2', 'sp-e.sp1', 'sp1-u4.bin', 'sp1-u3.bin', 'vs-bios.rom', 'sp-j2.sp1', 'sp1.jipan.1024', 'sp-45.sp1', 'sp-j3.sp1', 'japan-j3.bin', 'sp1-j3.bin', 'neo-po.bin', 'neo-epo.bin', 'neodebug.bin', 'sp-1v1_3db8c.bin', @@ -2283,11 +2515,11 @@ describe('FinalBurn Neo Neo Geo e544671', () => { 'sfix.sfix', '000-lo.lo', ]); // Clones exclude parent ROMs - expect(gameNamesToRomNames.get('3countb')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('3countb')).toIncludeSameMembers([ '043-p1.p1', '043-s1.s1', 
'043-c1.c1', '043-c2.c2', '043-c3.c3', '043-c4.c4', '043-m1.m1', '043-v1.v1', '043-v2.v2', ]); - expect(gameNamesToRomNames.get('3countba')).toIncludeAllMembers([ + expect(gameNamesToRomNames.get('3countba')).toIncludeSameMembers([ '043-epr.ep1', '043-epr.ep2', ]); }); diff --git a/test/modules/datParentInferrer.test.ts b/test/modules/datParentInferrer.test.ts index f48b39047..814fb8d15 100644 --- a/test/modules/datParentInferrer.test.ts +++ b/test/modules/datParentInferrer.test.ts @@ -13,7 +13,7 @@ function buildDat(gameNames: string[]): DAT { ); } -it('should not do anything if the DAT has parent/clone info', async () => { +it('should not do anything if the DAT has parent/clone info', () => { // Given const dat = new LogiqxDAT(new Header(), [ new Game({ name: 'game one' }), @@ -21,13 +21,13 @@ it('should not do anything if the DAT has parent/clone info', async () => { ]); // When - const inferredDat = await new DATParentInferrer(new Options(), new ProgressBarFake()).infer(dat); + const inferredDat = new DATParentInferrer(new Options(), new ProgressBarFake()).infer(dat); // Then expect(inferredDat === dat).toEqual(true); }); -it('should ignore the DAT\'s parent/clone info if specified', async () => { +it('should ignore the DAT\'s parent/clone info if specified', () => { // Given const options = new Options({ datIgnoreParentClone: true, @@ -38,7 +38,7 @@ it('should ignore the DAT\'s parent/clone info if specified', async () => { ]); // When - const inferredDat = await new DATParentInferrer(options, new ProgressBarFake()).infer(dat); + const inferredDat = new DATParentInferrer(options, new ProgressBarFake()).infer(dat); // Then expect(inferredDat === dat).toEqual(false); @@ -46,12 +46,12 @@ it('should ignore the DAT\'s parent/clone info if specified', async () => { expect(inferredDat.getParents().every((parent) => parent.getGames().length === 1)).toEqual(true); }); -it('should not do anything if the DAT has no games', async () => { +it('should not do anything if the 
DAT has no games', () => { // Given const dat = new LogiqxDAT(new Header(), []); // When - const inferredDat = await new DATParentInferrer(new Options(), new ProgressBarFake()).infer(dat); + const inferredDat = new DATParentInferrer(new Options(), new ProgressBarFake()).infer(dat); // Then expect(inferredDat === dat).toEqual(true); @@ -117,6 +117,17 @@ describe('similar games', () => { 'All Star Tennis \'99 (Europe) (En,Fr,De,Es,It)', 'All Star Tennis 99 (USA)', ], 'All Star Tennis 99 (USA)'], + [[ + '[BIOS] PS3 System Software Update (World) (v4.88)', + '[BIOS] PS3 System Software Update (World) (v3.41) (Patch)', + '[BIOS] PS3 System Software Update (World) (v0.90) (Tool)', + '[BIOS] PS3 System Software Update (World) (v0.91-005) (Tool)', + '[BIOS] PS3 System Software Update (World) (v3.41) (Shop)', + '[BIOS] PS3 System Software Update (World) (v3.41-1)', + '[BIOS] PS3 System Software Update (World) (v1.60) (Debug) [b]', + '[BIOS] PS3 System Software Update (World) (v1.00) (Disc)', + '[BIOS] PS3 System Software Update (World) (v4.70) (Arcade)', + ], '[BIOS] PS3 System Software Update (World) (v4.88)'], // https://emulation.gametechwiki.com/index.php/GoodTools [[ 'A game (1990)(Side A).zip', @@ -179,19 +190,48 @@ describe('similar games', () => { 'F1 World Grand Prix v1.006 (2000)(Video System)(US)(M4)[!]', ], 'F1 World Grand Prix for Dreamcast v1.011 (1999)(Video System)(JP)(en)[!]'], [[ - '[BIOS] PS3 System Software Update (World) (v4.88)', - '[BIOS] PS3 System Software Update (World) (v3.41) (Patch)', - '[BIOS] PS3 System Software Update (World) (v0.90) (Tool)', - '[BIOS] PS3 System Software Update (World) (v0.91-005) (Tool)', - '[BIOS] PS3 System Software Update (World) (v3.41) (Shop)', - '[BIOS] PS3 System Software Update (World) (v3.41-1)', - '[BIOS] PS3 System Software Update (World) (v1.60) (Debug) [b]', - '[BIOS] PS3 System Software Update (World) (v1.00) (Disc)', - '[BIOS] PS3 System Software Update (World) (v4.70) (Arcade)', - ], '[BIOS] PS3 System 
Software Update (World) (v4.88)'], - ])('should group similar games: %s', async (gameNames, expectedGameName) => { + '18 Wheeler - American Pro Trucker (2001)(Sega)(US)', + '18 Wheeler - American Pro Trucker v1.006 (2000)(Sega)(JP)(en)[!]', + '18 Wheeler - American Pro Trucker v1.500 (2001)(Sega)(US)[!]', + '18 Wheeler - American Pro Trucker v1.700 (2001)(Sega)(PAL)(M4)[!]', + ], '18 Wheeler - American Pro Trucker (2001)(Sega)(US)'], + [[ + 'Airforce Delta v1.000 (1999)(Konami)(US)[!][1S]', + 'Airforce Delta v1.000 (1999)(Konami)(US)[!][2S]', + 'Airforce Delta v1.000 (1999)(Konami)(US)[3S]', + 'Airforce Delta v1.002 (1999)(Konami)(JP)[!]', + ], 'Airforce Delta v1.000 (1999)(Konami)(US)[!][1S]'], + [[ + 'Biohazard - Code Veronica Shokai Genteiban v1.002 (1999)(Capcom)(JP)(Disc 1 of 2)[!][2, 3]', + 'Biohazard - Code Veronica Shokai Genteiban v1.002 (1999)(Capcom)(JP)(Disc 1 of 2)[!][2M1, 2M3, 2MB1]', + 'Biohazard - Code Veronica Shokai Genteiban v1.002 (1999)(Capcom)(JP)(Disc 1 of 2)[!][HK112D, HK112E]', + ], 'Biohazard - Code Veronica Shokai Genteiban v1.002 (1999)(Capcom)(JP)(Disc 1 of 2)[!][2, 3]'], + [[ + 'Comic Party v2.001 (2001)(Aqua Plus)(JP)(Disc 1 of 2)[!][10MM1]', + 'Comic Party v2.001 (2001)(Aqua Plus)(JP)(Disc 1 of 2)[!][12MM1]', + 'Comic Party v2.001 (2001)(Aqua Plus)(JP)(Disc 1 of 2)[!][14M1]', + 'Comic Party v2.001 (2001)(Aqua Plus)(JP)(Disc 1 of 2)[!][15M1, 15M2]', + 'Comic Party v3.004 (2001)(Aqua Plus)(JP)(Disc 1 of 2)[!]', + ], 'Comic Party v2.001 (2001)(Aqua Plus)(JP)(Disc 1 of 2)[!][10MM1]'], + [[ + 'Generator Vol. 1 v1.002 (1999)(Sega)(US)[!][14S]', + 'Generator Vol. 1 v1.002 (1999)(Sega)(US)[!][1M5, 1MM1]', + 'Generator Vol. 1 v1.002 (1999)(Sega)(US)[!][2MB13, 2MB14, 2MB32]', + 'Generator Vol. 1 v1.002 (1999)(Sega)(US)[!][2MB3, 7MM1]', + 'Generator Vol. 1 v1.002 (1999)(Sega)(US)[!][5M2, 5M3]', + 'Generator Vol. 1 v1.002 (1999)(Sega)(US)[!][8MB1, 8MB4]', + 'Generator Vol. 1 v1.002 (1999)(Sega)(US)[3MM1]', + 'Generator Vol. 
1 v1.010 (1999)(Sega)(JP)(en)[!]', + ], 'Generator Vol. 1 v1.002 (1999)(Sega)(US)[!][14S]'], + [[ + 'NFL 2K v1.007 (1999)(Sega)(US)[!][10S]', + 'NFL 2K v1.007 (1999)(Sega)(US)[!][13S]', + 'NFL 2K v1.007 (1999)(Sega)(US)[!][9S]', + 'NFL 2K v1.007 (1999)(Sega)(US)[!][MT B08, B13, B17, B19, B20]', + ], 'NFL 2K v1.007 (1999)(Sega)(US)[!][10S]'], + ])('should group similar games: %s', (gameNames, expectedGameName) => { const ungroupedDat = buildDat(gameNames); - const groupedDat = await new DATParentInferrer(new Options(), new ProgressBarFake()) + const groupedDat = new DATParentInferrer(new Options(), new ProgressBarFake()) .infer(ungroupedDat); expect(groupedDat.getParents()).toHaveLength(1); expect(groupedDat.getParents()[0].getGames()).toHaveLength(ungroupedDat.getGames().length); @@ -216,9 +256,9 @@ describe('dissimilar games', () => { 'Final Fantasy VII (USA) (Interactive Sampler CD)', 'Final Fantasy VII (USA) (Square Soft on PlayStation Previews)', ]], - ])('should not group different discs', async (gameNames) => { + ])('should not group different discs', (gameNames) => { const ungroupedDat = buildDat(gameNames); - const groupedDat = await new DATParentInferrer(new Options(), new ProgressBarFake()) + const groupedDat = new DATParentInferrer(new Options(), new ProgressBarFake()) .infer(ungroupedDat); expect(groupedDat.getParents()).toHaveLength(gameNames.length); expect(groupedDat.getParents().every((parent) => parent.getGames().length === 1)).toEqual(true); @@ -235,9 +275,9 @@ describe('dissimilar games', () => { 'Madden NFL 2004 (USA)', 'Madden NFL 2005 (USA)', ]], - ])('should not group different years', async (gameNames) => { + ])('should not group different years', (gameNames) => { const ungroupedDat = buildDat(gameNames); - const groupedDat = await new DATParentInferrer(new Options(), new ProgressBarFake()) + const groupedDat = new DATParentInferrer(new Options(), new ProgressBarFake()) .infer(ungroupedDat); 
expect(groupedDat.getParents()).toHaveLength(gameNames.length); expect(groupedDat.getParents().every((parent) => parent.getGames().length === 1)).toEqual(true); @@ -249,9 +289,9 @@ describe('dissimilar games', () => { 'Hitman - Contracts (Europe)', 'Hitman - Silent Assassin (Japan)', ]], - ])('should not group different taglines', async (gameNames) => { + ])('should not group different taglines', (gameNames) => { const ungroupedDat = buildDat(gameNames); - const groupedDat = await new DATParentInferrer(new Options(), new ProgressBarFake()) + const groupedDat = new DATParentInferrer(new Options(), new ProgressBarFake()) .infer(ungroupedDat); expect(groupedDat.getParents()).toHaveLength(gameNames.length); expect(groupedDat.getParents().every((parent) => parent.getGames().length === 1)).toEqual(true); diff --git a/test/modules/datScanner.test.ts b/test/modules/datScanner.test.ts index 7cdba2c09..463e11d14 100644 --- a/test/modules/datScanner.test.ts +++ b/test/modules/datScanner.test.ts @@ -4,6 +4,8 @@ import path from 'node:path'; import which from 'which'; import DATScanner from '../../src/modules/datScanner.js'; +import FileCache from '../../src/types/files/fileCache.js'; +import FileFactory from '../../src/types/files/fileFactory.js'; import Options, { OptionsProps } from '../../src/types/options.js'; import ProgressBarFake from '../console/progressBarFake.js'; @@ -11,6 +13,7 @@ function createDatScanner(props: OptionsProps): DATScanner { return new DATScanner( new Options(props), new ProgressBarFake(), + new FileFactory(new FileCache()), ); } diff --git a/test/modules/dir2DatCreator.test.ts b/test/modules/dir2DatCreator.test.ts index 4e58aa8e6..b03aca87b 100644 --- a/test/modules/dir2DatCreator.test.ts +++ b/test/modules/dir2DatCreator.test.ts @@ -1,5 +1,7 @@ import 'jest-extended'; +import path from 'node:path'; + import CandidateGenerator from '../../src/modules/candidateGenerator.js'; import DATGameInferrer from '../../src/modules/datGameInferrer.js'; import 
DATScanner from '../../src/modules/datScanner.js'; @@ -8,9 +10,10 @@ import ROMIndexer from '../../src/modules/romIndexer.js'; import ROMScanner from '../../src/modules/romScanner.js'; import FsPoly from '../../src/polyfill/fsPoly.js'; import DAT from '../../src/types/dats/dat.js'; +import FileCache from '../../src/types/files/fileCache.js'; +import FileFactory from '../../src/types/files/fileFactory.js'; import Options from '../../src/types/options.js'; import ReleaseCandidate from '../../src/types/releaseCandidate.js'; -import ROMWithFiles from '../../src/types/romWithFiles.js'; import ProgressBarFake from '../console/progressBarFake.js'; it('should do nothing if dir2dat command not provided', async () => { @@ -19,17 +22,21 @@ it('should do nothing if dir2dat command not provided', async () => { // No command provided input: ['test/fixtures/roms'], }); - const files = await new ROMScanner(options, new ProgressBarFake()).scan(); + const files = await new ROMScanner( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan(); // And a DAT - const inferredDats = new DATGameInferrer(options, new ProgressBarFake()).infer(files); + const inferredDats = await new DATGameInferrer(options, new ProgressBarFake()).infer(files); expect(inferredDats).toHaveLength(1); const [inferredDat] = inferredDats; // And candidates const candidates = await new CandidateGenerator(options, new ProgressBarFake()).generate( inferredDat, - await new ROMIndexer(options, new ProgressBarFake()).index(files), + new ROMIndexer(options, new ProgressBarFake()).index(files), ); // When writing the DAT to disk @@ -46,17 +53,21 @@ it('should write a valid DAT', async () => { commands: ['dir2dat'], input: ['test/fixtures/roms'], }); - const files = await new ROMScanner(options, new ProgressBarFake()).scan(); + const files = await new ROMScanner( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan(); // And a DAT - const inferredDats = new 
DATGameInferrer(options, new ProgressBarFake()).infer(files); + const inferredDats = await new DATGameInferrer(options, new ProgressBarFake()).infer(files); expect(inferredDats).toHaveLength(1); const [inferredDat] = inferredDats; // And candidates const candidates = await new CandidateGenerator(options, new ProgressBarFake()).generate( inferredDat, - await new ROMIndexer(options, new ProgressBarFake()).index(files), + new ROMIndexer(options, new ProgressBarFake()).index(files), ); // When writing the DAT to disk @@ -75,7 +86,7 @@ it('should write a valid DAT', async () => { const writtenDats = await new DATScanner(new Options({ ...options, dat: [dir2dat], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); expect(writtenDats).toHaveLength(1); [writtenDat] = writtenDats; } finally { @@ -97,19 +108,23 @@ it('should use the candidates for games and ROMs', async () => { // Given some input ROMs const options = new Options({ commands: ['dir2dat'], - input: ['test/fixtures/roms'], + input: [path.join('test', 'fixtures', 'roms')], }); - const files = await new ROMScanner(options, new ProgressBarFake()).scan(); + const files = await new ROMScanner( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan(); // And a DAT - const inferredDats = new DATGameInferrer(options, new ProgressBarFake()).infer(files); + const inferredDats = await new DATGameInferrer(options, new ProgressBarFake()).infer(files); expect(inferredDats).toHaveLength(1); const [inferredDat] = inferredDats; // And candidates const candidates = await new CandidateGenerator(options, new ProgressBarFake()).generate( inferredDat, - await new ROMIndexer(options, new ProgressBarFake()).index(files), + new ROMIndexer(options, new ProgressBarFake()).index(files), ); // When manipulating the candidates @@ -118,11 +133,8 @@ it('should use the candidates for games and ROMs', async () => { releaseCandidates.map((candidate) => new 
ReleaseCandidate( candidate.getGame().withProps({ name: `${candidate.getGame().getName()} (updated)` }), candidate.getRelease(), - candidate.getRomsWithFiles().map((romWithFiles) => new ROMWithFiles( - romWithFiles.getRom().withName(`${romWithFiles.getRom().getName()} (updated)`), - romWithFiles.getInputFile(), - romWithFiles.getOutputFile(), - )), + candidate.getRomsWithFiles().map((romWithFiles) => romWithFiles + .withRom(romWithFiles.getRom().withName(`${romWithFiles.getRom().getName()} (updated)`))), ))])); // When writing the DAT to disk @@ -141,7 +153,7 @@ it('should use the candidates for games and ROMs', async () => { const writtenDats = await new DATScanner(new Options({ ...options, dat: [dir2dat], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); expect(writtenDats).toHaveLength(1); [writtenDat] = writtenDats; } finally { diff --git a/test/modules/fixdatCreator.test.ts b/test/modules/fixdatCreator.test.ts index 267b37432..3df832882 100644 --- a/test/modules/fixdatCreator.test.ts +++ b/test/modules/fixdatCreator.test.ts @@ -8,6 +8,8 @@ import LogiqxDAT from '../../src/types/dats/logiqx/logiqxDat.js'; import Parent from '../../src/types/dats/parent.js'; import Release from '../../src/types/dats/release.js'; import ROM from '../../src/types/dats/rom.js'; +import FileCache from '../../src/types/files/fileCache.js'; +import FileFactory from '../../src/types/files/fileFactory.js'; import Options, { OptionsProps } from '../../src/types/options.js'; import ReleaseCandidate from '../../src/types/releaseCandidate.js'; import ROMWithFiles from '../../src/types/romWithFiles.js'; @@ -70,7 +72,7 @@ async function runFixdatCreator( const fixdat = (await new DATScanner(new Options({ ...optionsProps, dat: [fixdatPath], - }), new ProgressBarFake()).scan())[0]; + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan())[0]; await fsPoly.rm(fixdatPath, { force: true }); diff --git 
a/test/modules/movedRomDeleter.test.ts b/test/modules/movedRomDeleter.test.ts index dc32eec99..2a3811fe5 100644 --- a/test/modules/movedRomDeleter.test.ts +++ b/test/modules/movedRomDeleter.test.ts @@ -1,5 +1,6 @@ import path from 'node:path'; +import Temp from '../../src/globals/temp.js'; import CandidateGenerator from '../../src/modules/candidateGenerator.js'; import MovedROMDeleter from '../../src/modules/movedRomDeleter.js'; import ROMIndexer from '../../src/modules/romIndexer.js'; @@ -11,13 +12,15 @@ import LogiqxDAT from '../../src/types/dats/logiqx/logiqxDat.js'; import ROM from '../../src/types/dats/rom.js'; import Zip from '../../src/types/files/archives/zip.js'; import File from '../../src/types/files/file.js'; +import FileCache from '../../src/types/files/fileCache.js'; +import FileFactory from '../../src/types/files/fileFactory.js'; import Options from '../../src/types/options.js'; import ProgressBarFake from '../console/progressBarFake.js'; it('should do nothing if no ROMs moved', async () => { const romFiles = await new ROMScanner(new Options({ input: ['./test/fixtures/roms'], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); expect(romFiles.length).toBeGreaterThan(0); await new MovedROMDeleter(new ProgressBarFake()).delete(romFiles, [], new Map()); @@ -125,49 +128,53 @@ describe('should delete archives', () => { 'Zero 4 Champ II (Japan).zip', ]], ]))('%s', async (games, expectedDeletedFilePaths) => { - const inputPath = 'input'; - const options = new Options({ - commands: ['move', ...(command ? [command] : [])], - input: [inputPath], - output: 'output', - }); + const inputPath = await fsPoly.mkdtemp(path.join(Temp.getTempDir(), 'input')); + try { + const options = new Options({ + commands: ['move', ...(command ? 
[command] : [])], + input: [inputPath], + output: 'output', + }); - const dat = new LogiqxDAT(new Header(), games); + const dat = new LogiqxDAT(new Header(), games); - const rawRomFiles = (await Promise.all(dat.getParents() - .flatMap((parent) => parent.getGames()) - .map(async (game): Promise => { - // A path that should not exist - const zip = new Zip(path.join(inputPath, `${game.getName()}.zip`)); - return Promise.all(game.getRoms().map(async (rom) => rom.toArchiveEntry(zip))); - }))) - .flat(); + const rawRomFiles = (await Promise.all(dat.getParents() + .flatMap((parent) => parent.getGames()) + .map(async (game): Promise => { + const zipPath = path.join(inputPath, `${game.getName()}.zip`); + await fsPoly.touch(zipPath); + const zip = new Zip(zipPath); + return Promise.all(game.getRoms().map(async (rom) => rom.toArchiveEntry(zip))); + }))) + .flat(); - const indexedRomFiles = await new ROMIndexer(options, new ProgressBarFake()) - .index(rawRomFiles); - const parentsToCandidates = await new CandidateGenerator(options, new ProgressBarFake()) - .generate(dat, indexedRomFiles); + const indexedRomFiles = new ROMIndexer(options, new ProgressBarFake()).index(rawRomFiles); + const parentsToCandidates = await new CandidateGenerator(options, new ProgressBarFake()) + .generate(dat, indexedRomFiles); - const inputRoms = rawRomFiles; - const movedRoms = [...parentsToCandidates.values()] - .flat() - .flatMap((releaseCandidate) => releaseCandidate.getRomsWithFiles()) - .map((romWithFiles) => romWithFiles.getInputFile()); + const inputRoms = rawRomFiles; + const movedRoms = [...parentsToCandidates.values()] + .flat() + .flatMap((releaseCandidate) => releaseCandidate.getRomsWithFiles()) + .map((romWithFiles) => romWithFiles.getInputFile()); - const writtenRoms = [...parentsToCandidates.values()] - .flat() - .flatMap((releaseCanddiate) => releaseCanddiate.getRomsWithFiles()) - .map((romWithFiles) => romWithFiles.getOutputFile()); - const datsToWrittenRoms = new Map([[dat, 
writtenRoms]]); + const writtenRoms = [...parentsToCandidates.values()] + .flat() + .flatMap((releaseCanddiate) => releaseCanddiate.getRomsWithFiles()) + .map((romWithFiles) => romWithFiles.getOutputFile()); + const datsToWrittenRoms = new Map([[dat, writtenRoms]]); - const deletedFilePaths = ( - await new MovedROMDeleter(new ProgressBarFake()) - .delete(inputRoms, movedRoms, datsToWrittenRoms) - ) - .map((filePath) => filePath.replace(inputPath + path.sep, '')) - .sort(); + const deletedFilePaths = ( + await new MovedROMDeleter(new ProgressBarFake()) + .delete(inputRoms, movedRoms, datsToWrittenRoms) + ) + .map((filePath) => filePath.replace(inputPath + path.sep, '')) + .sort(); - expect(deletedFilePaths).toEqual(expectedDeletedFilePaths); + expect(deletedFilePaths).toEqual(expectedDeletedFilePaths); + } finally { + await fsPoly.rm(inputPath, { recursive: true }); + } }); }); }); diff --git a/test/modules/patchScanner.test.ts b/test/modules/patchScanner.test.ts index f756f851d..ddd0c680e 100644 --- a/test/modules/patchScanner.test.ts +++ b/test/modules/patchScanner.test.ts @@ -4,12 +4,17 @@ import path from 'node:path'; import Temp from '../../src/globals/temp.js'; import PatchScanner from '../../src/modules/patchScanner.js'; import fsPoly from '../../src/polyfill/fsPoly.js'; +import FileCache from '../../src/types/files/fileCache.js'; import FileFactory from '../../src/types/files/fileFactory.js'; import Options from '../../src/types/options.js'; import ProgressBarFake from '../console/progressBarFake.js'; function createPatchScanner(patch: string[], patchExclude: string[] = []): PatchScanner { - return new PatchScanner(new Options({ patch, patchExclude }), new ProgressBarFake()); + return new PatchScanner( + new Options({ patch, patchExclude }), + new ProgressBarFake(), + new FileFactory(new FileCache()), + ); } it('should throw on nonexistent paths', async () => { diff --git a/test/modules/romHeaderProcessor.test.ts b/test/modules/romHeaderProcessor.test.ts 
index 57a42107a..3a60b5d88 100644 --- a/test/modules/romHeaderProcessor.test.ts +++ b/test/modules/romHeaderProcessor.test.ts @@ -5,6 +5,8 @@ import ROMHeaderProcessor from '../../src/modules/romHeaderProcessor.js'; import ROMScanner from '../../src/modules/romScanner.js'; import FsPoly from '../../src/polyfill/fsPoly.js'; import File from '../../src/types/files/file.js'; +import FileCache from '../../src/types/files/fileCache.js'; +import FileFactory from '../../src/types/files/fileFactory.js'; import Options from '../../src/types/options.js'; import ProgressBarFake from '../console/progressBarFake.js'; @@ -12,12 +14,12 @@ describe('extension has possible header', () => { it('should do nothing if extension not found', async () => { const inputRomFiles = await new ROMScanner(new Options({ input: ['./test/fixtures/roms/{,**/}*.rom'], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); expect(inputRomFiles.length).toBeGreaterThan(0); const processedRomFiles = await new ROMHeaderProcessor(new Options({ commands: ['copy', 'extract'], - }), new ProgressBarFake()).process(inputRomFiles); + }), new ProgressBarFake(), new FileFactory(new FileCache())).process(inputRomFiles); expect(processedRomFiles).toHaveLength(inputRomFiles.length); for (const [idx, processedRomFile] of processedRomFiles.entries()) { @@ -33,7 +35,7 @@ describe('extension has possible header', () => { const processedRomFiles = await new ROMHeaderProcessor(new Options({ commands: ['copy', 'extract'], - }), new ProgressBarFake()).process(inputRomFiles); + }), new ProgressBarFake(), new FileFactory(new FileCache())).process(inputRomFiles); expect(processedRomFiles).toHaveLength(1); expect(processedRomFiles[0].getFileHeader()).toBeUndefined(); @@ -42,12 +44,12 @@ describe('extension has possible header', () => { it('should process raw headered files', async () => { const inputRomFiles = await new ROMScanner(new Options({ input: 
['./test/fixtures/roms/headered/*{.a78,.lnx,.nes,.fds,.smc}*'], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); expect(inputRomFiles.length).toBeGreaterThan(0); const processedRomFiles = await new ROMHeaderProcessor(new Options({ commands: ['copy', 'extract'], - }), new ProgressBarFake()).process(inputRomFiles); + }), new ProgressBarFake(), new FileFactory(new FileCache())).process(inputRomFiles); expect(processedRomFiles).toHaveLength(inputRomFiles.length); for (const [idx, processedRomFile] of processedRomFiles.entries()) { @@ -60,12 +62,13 @@ describe('extension has possible header', () => { it('should not process archived headered files if not manipulating', async () => { const inputRomFiles = await new ROMScanner(new Options({ input: ['./test/fixtures/roms/headered/*{.7z,.rar,.zip}'], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); expect(inputRomFiles.length).toBeGreaterThan(0); const processedRomFiles = await new ROMHeaderProcessor( new Options(), new ProgressBarFake(), + new FileFactory(new FileCache()), ).process(inputRomFiles); expect(processedRomFiles).toHaveLength(inputRomFiles.length); @@ -81,13 +84,13 @@ describe('should read file for header', () => { it('should do nothing with headerless files', async () => { const inputRomFiles = await new ROMScanner(new Options({ input: ['./test/fixtures/roms/!(headered){,/}*'], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); expect(inputRomFiles.length).toBeGreaterThan(0); const processedRomFiles = await new ROMHeaderProcessor(new Options({ commands: ['copy', 'extract'], header: '**/*', - }), new ProgressBarFake()).process(inputRomFiles); + }), new ProgressBarFake(), new FileFactory(new FileCache())).process(inputRomFiles); expect(processedRomFiles).toHaveLength(inputRomFiles.length); for (const [idx, processedRomFile] of 
processedRomFiles.entries()) { @@ -100,13 +103,13 @@ describe('should read file for header', () => { it('should process headered files', async () => { const inputRomFiles = await new ROMScanner(new Options({ input: ['./test/fixtures/roms/headered/!(*{.a78,.lnx,.nes,.fds,.smc}*)'], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); expect(inputRomFiles.length).toBeGreaterThan(0); const processedRomFiles = await new ROMHeaderProcessor(new Options({ commands: ['copy', 'extract'], header: '**/*', - }), new ProgressBarFake()).process(inputRomFiles); + }), new ProgressBarFake(), new FileFactory(new FileCache())).process(inputRomFiles); expect(processedRomFiles).toHaveLength(inputRomFiles.length); for (const [idx, processedRomFile] of processedRomFiles.entries()) { diff --git a/test/modules/romScanner.test.ts b/test/modules/romScanner.test.ts index 376d3bfae..e5331eef1 100644 --- a/test/modules/romScanner.test.ts +++ b/test/modules/romScanner.test.ts @@ -3,13 +3,21 @@ import path from 'node:path'; import Temp from '../../src/globals/temp.js'; import ROMScanner from '../../src/modules/romScanner.js'; +import ArrayPoly from '../../src/polyfill/arrayPoly.js'; import fsPoly from '../../src/polyfill/fsPoly.js'; +import ArchiveEntry from '../../src/types/files/archives/archiveEntry.js'; +import FileCache from '../../src/types/files/fileCache.js'; import { ChecksumBitmask } from '../../src/types/files/fileChecksums.js'; +import FileFactory from '../../src/types/files/fileFactory.js'; import Options, { OptionsProps } from '../../src/types/options.js'; import ProgressBarFake from '../console/progressBarFake.js'; function createRomScanner(input: string[], inputExclude: string[] = []): ROMScanner { - return new ROMScanner(new Options({ input, inputExclude }), new ProgressBarFake()); + return new ROMScanner( + new Options({ input, inputExclude }), + new ProgressBarFake(), + new FileFactory(new FileCache()), + ); } it('should 
throw on nonexistent paths', async () => { @@ -33,7 +41,7 @@ it('should not throw on bad archives', async () => { describe('multiple files', () => { it('should scan multiple files with no exclusions', async () => { - const expectedRomFiles = 68; + const expectedRomFiles = 94; await expect(createRomScanner(['test/fixtures/roms']).scan()).resolves.toHaveLength(expectedRomFiles); await expect(createRomScanner(['test/fixtures/roms/*', 'test/fixtures/roms/**/*']).scan()).resolves.toHaveLength(expectedRomFiles); await expect(createRomScanner(['test/fixtures/roms/**/*']).scan()).resolves.toHaveLength(expectedRomFiles); @@ -41,22 +49,75 @@ describe('multiple files', () => { }); test.each([ - [{ input: [path.join('test', 'fixtures', 'roms')] }, 104], + [{ input: [path.join('test', 'fixtures', 'roms')] }, 137], [{ input: [path.join('test', 'fixtures', 'roms', '7z')] }, 12], [{ input: [path.join('test', 'fixtures', 'roms', 'gz')] }, 14], [{ input: [path.join('test', 'fixtures', 'roms', 'rar')] }, 12], [{ input: [path.join('test', 'fixtures', 'roms', 'tar')] }, 12], [{ input: [path.join('test', 'fixtures', 'roms', 'zip')] }, 15], ] satisfies [OptionsProps, number][])('should calculate checksums of archives: %s', async (optionsProps, expectedRomFiles) => { - const scannedFiles = await new ROMScanner(new Options(optionsProps), new ProgressBarFake()) - .scan(ChecksumBitmask.CRC32, true); + const checksumBitmask = Object.keys(ChecksumBitmask) + .filter((bitmask): bitmask is keyof typeof ChecksumBitmask => Number.isNaN(Number(bitmask))) + .reduce((allBitmasks, bitmask) => allBitmasks | ChecksumBitmask[bitmask], 0); + const scannedFiles = await new ROMScanner( + new Options(optionsProps), + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan(checksumBitmask, true); expect(scannedFiles).toHaveLength(expectedRomFiles); }); + it('should scan quickly', async () => { + const options = new Options({ + input: [path.join('test', 'fixtures', 'roms')], + inputChecksumQuick: 
true, + }); + + const scannedFiles = await new ROMScanner( + options, + new ProgressBarFake(), + new FileFactory(new FileCache()), + ).scan(ChecksumBitmask.CRC32, false); + + const extensionsWithoutCrc32 = scannedFiles + .filter((file) => file instanceof ArchiveEntry) + .filter((file) => !file.getCrc32()) + .map((file) => { + const match = file.getFilePath().match(/[^.]+((\.[a-zA-Z0-9]+)+)$/); + return match ? match[1] : undefined; + }) + .filter((ext) => ext !== undefined) + .reduce(ArrayPoly.reduceUnique(), []) + .sort(); + expect(extensionsWithoutCrc32).toEqual(['.chd', '.tar.gz']); + + const entriesWithMd5 = scannedFiles + .filter((file) => file instanceof ArchiveEntry) + .filter((file) => file.getMd5()); + expect(entriesWithMd5).toHaveLength(0); + + const extensionsWithSha1 = scannedFiles + .filter((file) => file instanceof ArchiveEntry) + .filter((file) => file.getSha1()) + .map((file) => { + const match = file.getFilePath().match(/[^.]+((\.[a-zA-Z0-9]+)+)$/); + return match ? match[1] : undefined; + }) + .filter((ext) => ext !== undefined) + .reduce(ArrayPoly.reduceUnique(), []) + .sort(); + expect(extensionsWithSha1).toEqual(['.chd']); + + const entriesWithSha256 = scannedFiles + .filter((file) => file instanceof ArchiveEntry) + .filter((file) => file.getSha256()); + expect(entriesWithSha256).toHaveLength(0); + }); + it('should scan multiple files with some file exclusions', async () => { - await expect(createRomScanner(['test/fixtures/roms/**/*'], ['test/fixtures/roms/**/*.rom']).scan()).resolves.toHaveLength(51); - await expect(createRomScanner(['test/fixtures/roms/**/*'], ['test/fixtures/roms/**/*.rom', 'test/fixtures/roms/**/*.rom']).scan()).resolves.toHaveLength(51); - await expect(createRomScanner(['test/fixtures/roms/**/*'], ['test/fixtures/roms/**/*.rom', 'test/fixtures/roms/**/*.zip']).scan()).resolves.toHaveLength(40); + await expect(createRomScanner(['test/fixtures/roms/**/*'], ['test/fixtures/roms/**/*.rom']).scan()).resolves.toHaveLength(77); + 
await expect(createRomScanner(['test/fixtures/roms/**/*'], ['test/fixtures/roms/**/*.rom', 'test/fixtures/roms/**/*.rom']).scan()).resolves.toHaveLength(77); + await expect(createRomScanner(['test/fixtures/roms/**/*'], ['test/fixtures/roms/**/*.rom', 'test/fixtures/roms/**/*.zip']).scan()).resolves.toHaveLength(66); }); it('should scan multiple files with every file excluded', async () => { diff --git a/test/modules/statusGenerator.test.ts b/test/modules/statusGenerator.test.ts index d7dfb305a..74190227f 100644 --- a/test/modules/statusGenerator.test.ts +++ b/test/modules/statusGenerator.test.ts @@ -216,7 +216,7 @@ describe('toConsole', () => { preferParent: true, }); let map = await candidateGenerator(options, []); - map = await new CandidatePreferer(options, new ProgressBarFake()).prefer(dummyDat, map); + map = new CandidatePreferer(options, new ProgressBarFake()).prefer(dummyDat, map); const datStatus = new StatusGenerator(options, new ProgressBarFake()) .generate(dummyDat, map); expect(stripAnsi(datStatus.toConsole(options))).toEqual('2/5 games, 0/1 BIOSes, 1/1 devices, 2/5 retail releases found'); @@ -234,7 +234,7 @@ describe('toConsole', () => { gameNameSingleRom, gameNameMultipleRoms, ]); - map = await new CandidatePreferer(options, new ProgressBarFake()).prefer(dummyDat, map); + map = new CandidatePreferer(options, new ProgressBarFake()).prefer(dummyDat, map); const datStatus = new StatusGenerator(options, new ProgressBarFake()) .generate(dummyDat, map); expect(stripAnsi(datStatus.toConsole(options))).toEqual('5/5 games, 1/1 BIOSes, 1/1 devices, 5/5 retail releases found'); diff --git a/test/outputFactory.test.ts b/test/outputFactory.test.ts index 2d4d8636f..485c89ed6 100644 --- a/test/outputFactory.test.ts +++ b/test/outputFactory.test.ts @@ -62,8 +62,6 @@ describe('token replacement', () => { test.each([ ['foo/{datName}/bar', path.join('foo', 'DAT _ Name', 'bar', 'Dummy.rom')], ['foo/{datDescription}/bar', path.join('foo', 'DAT _ Description', 'bar', 
'Dummy.rom')], - ['root/{datReleaseRegion}', path.join('root', 'USA', 'Dummy.rom')], - ['root/{datReleaseLanguage}', path.join('root', 'EN', 'Dummy.rom')], ])('should replace {dat*}: %s', async (output, expectedPath) => { const options = new Options({ commands: ['copy'], output }); const dat = new LogiqxDAT(new Header({ name: 'DAT / Name', description: 'DAT \\ Description' }), []); @@ -84,9 +82,6 @@ describe('token replacement', () => { ['root/{region}', 'Game (E)', [], path.join('root', 'EUR', 'Dummy.rom')], ['root/{region}', 'Game (Europe)', [], path.join('root', 'EUR', 'Dummy.rom')], ['root/{region}', 'Game', ['EUR'], path.join('root', 'EUR', 'Dummy.rom')], - ['root/{gameRegion}', 'Game', ['EUR', 'JPN'], path.join('root', 'EUR', 'Dummy.rom')], - ['root/{gameRegion}', 'Game', ['JPN'], path.join('root', 'JPN', 'Dummy.rom')], - ['root/{gameRegion}', 'Game', ['JPN', 'EUR'], path.join('root', 'JPN', 'Dummy.rom')], ])('should replace {region}: %s', async (output, gameName, regions, expectedPath) => { const options = new Options({ commands: ['copy'], output }); const dat = new LogiqxDAT(new Header(), []); @@ -110,9 +105,6 @@ describe('token replacement', () => { ['root/{language}', 'Game (E)', [], path.join('root', 'EN', 'Dummy.rom')], ['root/{language}', 'Game (Europe)', [], path.join('root', 'EN', 'Dummy.rom')], ['root/{language}', 'Game', ['EUR'], path.join('root', 'EN', 'Dummy.rom')], - ['root/{gameLanguage}', 'Game', ['EUR', 'JPN'], path.join('root', 'EN', 'Dummy.rom')], - ['root/{gameLanguage}', 'Game', ['JPN'], path.join('root', 'JA', 'Dummy.rom')], - ['root/{gameLanguage}', 'Game', ['JPN', 'EUR'], path.join('root', 'JA', 'Dummy.rom')], ])('should replace {language}: %s', async (output, gameName, regions, expectedPath) => { const options = new Options({ commands: ['copy'], output }); const dat = new LogiqxDAT(new Header(), []); @@ -180,8 +172,8 @@ describe('token replacement', () => { ['Game (Unl)', 'Unlicensed'], // Default ['Game', 'Retail'], - ])('should 
replace {gameType}: %s', async (gameName, expectedPath) => { - const options = new Options({ commands: ['copy'], output: '{gameType}' }); + ])('should replace {type}: %s', async (gameName, expectedPath) => { + const options = new Options({ commands: ['copy'], output: '{type}' }); const game = new Game({ name: gameName, release: [ @@ -867,12 +859,15 @@ describe('token replacement', () => { describe('should respect "--dir-mirror"', () => { test.each([ - ['', os.devNull], - ['file.rom', path.join(os.devNull, 'file.rom')], ['roms/file.rom', path.join(os.devNull, 'file.rom')], ['roms/subdir/file.rom', path.join(os.devNull, 'subdir', 'file.rom')], ])('option is true: %s', async (filePath, expectedPath) => { - const options = new Options({ commands: ['copy'], output: os.devNull, dirMirror: true }); + const options = new Options({ + commands: ['copy'], + input: ['roms'], + output: os.devNull, + dirMirror: true, + }); const rom = new ROM({ name: path.basename(filePath), size: 0, crc32: '' }); const outputPath = OutputFactory.getPath( diff --git a/test/polyfill/fsPoly.test.ts b/test/polyfill/fsPoly.test.ts index 7a9bd13d4..d085bfffb 100644 --- a/test/polyfill/fsPoly.test.ts +++ b/test/polyfill/fsPoly.test.ts @@ -29,6 +29,104 @@ describe('isDirectory', () => { }); }); +describe('isDirectorySync', () => { + it('should return true for a directory', async () => { + const tempDir = await fsPoly.mkdtemp(Temp.getTempDir()); + expect(fsPoly.isDirectorySync(tempDir)).toEqual(true); + }); + + it('should return false for a file', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await fsPoly.touch(tempFile); + expect(fsPoly.isDirectorySync(tempFile)).toEqual(false); + fsPoly.rmSync(tempFile); + }); + + it('should return false for non-existent file', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + expect(fsPoly.isDirectorySync(tempFile)).toEqual(false); + }); +}); + +describe('isHardlink', () => { 
+ it('should return true for a hardlink', async () => { + const tempFileTarget = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'target')); + const tempFileLink = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'link')); + + try { + await fsPoly.touch(tempFileTarget); + + await fsPoly.hardlink(tempFileTarget, tempFileLink); + await expect(fsPoly.isHardlink(tempFileLink)).resolves.toEqual(true); + await expect(fsPoly.isHardlink(tempFileTarget)).resolves.toEqual(true); + } finally { + await fsPoly.rm(tempFileTarget, { force: true }); + await fsPoly.rm(tempFileLink, { force: true }); + } + }); + + it('should return false for a symlink', async () => { + const tempFileTarget = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'target')); + const tempFileLink = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'link')); + + try { + await fsPoly.touch(tempFileTarget); + + await fsPoly.symlink(tempFileTarget, tempFileLink); + await expect(fsPoly.isHardlink(tempFileLink)).resolves.toEqual(false); + await expect(fsPoly.isHardlink(tempFileTarget)).resolves.toEqual(false); + } finally { + await fsPoly.rm(tempFileTarget, { force: true }); + await fsPoly.rm(tempFileLink, { force: true }); + } + }); + + it('should return false for a file', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await fsPoly.touch(tempFile); + await expect(fsPoly.isHardlink(tempFile)).resolves.toEqual(false); + await fsPoly.rm(tempFile); + }); + + it('should return false for non-existent file', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await expect(fsPoly.isHardlink(tempFile)).resolves.toEqual(false); + }); +}); + +describe('hardlink', () => { + it('should create a hard link', async () => { + const tempFileTarget = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'target')); + const tempFileLink = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'link')); + + try { + await fsPoly.touch(tempFileTarget); + + 
await fsPoly.hardlink(tempFileTarget, tempFileLink); + await expect(fsPoly.isHardlink(tempFileLink)).resolves.toEqual(true); + await expect(fsPoly.isHardlink(tempFileTarget)).resolves.toEqual(true); + } finally { + await fsPoly.rm(tempFileTarget, { force: true }); + await fsPoly.rm(tempFileLink, { force: true }); + } + }); + + it('should not overwrite an existing file', async () => { + const tempFileTarget = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'target')); + const tempFileLink = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'link')); + + try { + await fsPoly.touch(tempFileTarget); + await fsPoly.touch(tempFileLink); + + await expect(fsPoly.hardlink(tempFileTarget, tempFileLink)).rejects.toThrow(); + } finally { + await fsPoly.rm(tempFileTarget, { force: true }); + await fsPoly.rm(tempFileLink, { force: true }); + } + }); +}); + describe('isSamba', () => { test.each([ '.', @@ -86,6 +184,45 @@ describe('isSymlink', () => { }); }); +describe('isSymlinkSync', () => { + it('should return false for a hard link', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await fsPoly.touch(tempFile); + const tempLink = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'link')); + await fsPoly.hardlink(tempFile, tempLink); + expect(fsPoly.isSymlinkSync(tempLink)).toEqual(false); + await fsPoly.rm(tempLink); + await fsPoly.rm(tempFile); + }); + + it('should return true for a symlink', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await fsPoly.touch(tempFile); + const tempLink = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'link')); + await fsPoly.symlink(tempFile, tempLink); + expect(fsPoly.isSymlinkSync(tempLink)).toEqual(true); + await fsPoly.rm(tempLink); + await fsPoly.rm(tempFile); + }); + + it('should return false for a plain directory', async () => { + const tempDir = await fsPoly.mkdtemp(Temp.getTempDir()); + expect(fsPoly.isSymlinkSync(tempDir)).toEqual(false); + 
}); + + it('should return false for a plain file', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await fsPoly.touch(tempFile); + expect(fsPoly.isSymlinkSync(tempFile)).toEqual(false); + await fsPoly.rm(tempFile); + }); + + it('should return false for non-existent file', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + expect(fsPoly.isSymlinkSync(tempFile)).toEqual(false); + }); +}); + describe('makeLegal', () => { describe('unix', () => { test.each([ @@ -177,6 +314,67 @@ describe('readlink', () => { }); }); +describe('readlinkSync', () => { + it('should throw on hard links', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await fsPoly.touch(tempFile); + const tempLink = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'link')); + await fsPoly.hardlink(tempFile, tempLink); + + expect(() => fsPoly.readlinkSync(tempLink)).toThrow(/non-symlink/); + + await fsPoly.rm(tempLink); + await fsPoly.rm(tempFile); + }); + + it('should read absolute symlinks', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await fsPoly.touch(tempFile); + const tempLink = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'link')); + const tempFileAbsolute = path.resolve(tempFile); + await fsPoly.symlink(tempFileAbsolute, tempLink); + + const readLink = fsPoly.readlinkSync(tempLink); + expect(readLink).toEqual(tempFileAbsolute); + expect(path.isAbsolute(readLink)).toEqual(true); + + await fsPoly.rm(tempLink); + await fsPoly.rm(tempFile); + }); + + it('should read relative symlinks', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await fsPoly.touch(tempFile); + const tempLink = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'link')); + const tempFileRelative = await fsPoly.symlinkRelativePath(tempFile, tempLink); + await fsPoly.symlink(tempFileRelative, tempLink); + + 
const readLink = fsPoly.readlinkSync(tempLink); + expect(readLink).toEqual(tempFileRelative); + expect(path.isAbsolute(readLink)).toEqual(false); + + await fsPoly.rm(tempLink); + await fsPoly.rm(tempFile); + }); + + it('should throw on plain files', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await fsPoly.touch(tempFile); + + expect(() => fsPoly.readlinkSync(tempFile)).toThrow(/non-symlink/); + + await fsPoly.rm(tempFile); + }); + + it('should throw on directories', async () => { + const tempDir = await fsPoly.mkdtemp(Temp.getTempDir()); + + expect(() => fsPoly.readlinkSync(tempDir)).toThrow(/non-symlink/); + + await fsPoly.rm(tempDir); + }); +}); + describe('readlinkResolved', () => { it('should read absolute symlinks', async () => { const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); @@ -210,6 +408,39 @@ describe('readlinkResolved', () => { }); }); +describe('readlinkResolvedSync', () => { + it('should read absolute symlinks', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await fsPoly.touch(tempFile); + const tempLink = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'link')); + const tempFileAbsolute = path.resolve(tempFile); + await fsPoly.symlink(tempFileAbsolute, tempLink); + + const readLinkResolved = fsPoly.readlinkResolvedSync(tempLink); + expect(readLinkResolved).toEqual(tempFileAbsolute); + expect(path.isAbsolute(readLinkResolved)).toEqual(true); + + await fsPoly.rm(tempLink); + await fsPoly.rm(tempFile); + }); + + it('should read relative symlinks', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await fsPoly.touch(tempFile); + const tempLink = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'link')); + const tempFileRelative = await fsPoly.symlinkRelativePath(tempFile, tempLink); + await fsPoly.symlink(tempFileRelative, tempLink); + + const readLinkResolved = 
fsPoly.readlinkResolvedSync(tempLink); + expect(readLinkResolved).not.toEqual(tempFileRelative); + expect(readLinkResolved).toEqual(path.resolve(tempFile)); + expect(path.isAbsolute(readLinkResolved)).toEqual(true); + + await fsPoly.rm(tempLink); + await fsPoly.rm(tempFile); + }); +}); + describe('rm', () => { it('should throw on missing file', async () => { const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); @@ -251,6 +482,47 @@ describe('rm', () => { }); }); +describe('rmSync', () => { + it('should throw on missing file', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await expect(fsPoly.exists(tempFile)).resolves.toEqual(false); + expect(() => fsPoly.rmSync(tempFile)).toThrow(); + }); + + it('should not throw on forcing missing file', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await expect(fsPoly.exists(tempFile)).resolves.toEqual(false); + expect(() => fsPoly.rmSync(tempFile, { force: true })).not.toThrow(); + }); + + it('should delete an existing file', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await fsPoly.touch(tempFile); + await expect(fsPoly.exists(tempFile)).resolves.toEqual(true); + fsPoly.rmSync(tempFile); + await expect(fsPoly.exists(tempFile)).resolves.toEqual(false); + }); + + it('should delete an existing directory', async () => { + const tempDir = await fsPoly.mkdtemp(path.join(Temp.getTempDir(), 'temp')); + await expect(fsPoly.exists(tempDir)).resolves.toEqual(true); + fsPoly.rmSync(tempDir); + await expect(fsPoly.exists(tempDir)).resolves.toEqual(false); + }); + + it('should not delete a symlink\'s target', async () => { + const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); + await fsPoly.touch(tempFile); + const tempLink = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'link')); + await fsPoly.symlink(tempFile, tempLink); + await 
expect(fsPoly.exists(tempLink)).resolves.toEqual(true); + fsPoly.rmSync(tempLink); + await expect(fsPoly.exists(tempLink)).resolves.toEqual(false); + await expect(fsPoly.exists(tempFile)).resolves.toEqual(true); + fsPoly.rmSync(tempFile); + }); +}); + describe('realpath', () => { it('should throw on non-existent path', async () => { const tempFile = await fsPoly.mktemp(path.join(Temp.getTempDir(), 'temp')); @@ -275,17 +547,3 @@ describe('touch', () => { } }); }); - -describe('touchSync', () => { - it('should mkdir and touch', async () => { - const tempDir = await fsPoly.mkdtemp(Temp.getTempDir()); - await fsPoly.rm(tempDir, { recursive: true }); - const tempFile = await fsPoly.mktemp(path.join(tempDir, 'temp')); - try { - fsPoly.touchSync(tempFile); - await expect(fsPoly.exists(tempFile)).resolves.toEqual(true); - } finally { - await fsPoly.rm(tempDir, { recursive: true, force: true }); - } - }); -}); diff --git a/test/polyfill/timePoly.test.ts b/test/polyfill/timePoly.test.ts new file mode 100644 index 000000000..27468e16e --- /dev/null +++ b/test/polyfill/timePoly.test.ts @@ -0,0 +1,14 @@ +import TimePoly from '../../src/polyfill/timePoly.js'; + +describe('hrtimeMillis', () => { + test.each([ + [10], + [100], + [1000], + ])('should calculate the difference for %s ms', async (timeout) => { + const before = TimePoly.hrtimeMillis(); + await new Promise((resolve) => { setTimeout(resolve, timeout); }); + const after = TimePoly.hrtimeMillis(before); + expect(after).toBeGreaterThanOrEqual(timeout); + }); +}); diff --git a/test/types/cache.test.ts b/test/types/cache.test.ts index 37ec6d12b..a83495f36 100644 --- a/test/types/cache.test.ts +++ b/test/types/cache.test.ts @@ -112,21 +112,6 @@ describe('getOrCompute', () => { } expect(computed).toEqual(0); }); - - it('should respect max cache size', async () => { - const maxSize = Math.floor(TEST_CACHE_SIZE / 2); - const cache = new Cache({ maxSize }); - - for (let i = 0; i < maxSize; i += 1) { - await 
cache.getOrCompute(String(i), () => i); - expect(cache.size()).toEqual(i + 1); - } - - for (let i = maxSize; i < TEST_CACHE_SIZE; i += 1) { - await cache.getOrCompute(String(i), () => i); - expect(cache.size()).toEqual(maxSize); - } - }); }); describe('set', () => { @@ -148,21 +133,6 @@ describe('set', () => { await expect(cache.get(String(i))).resolves.toEqual(i * 2); } }); - - it('should respect max cache size', async () => { - const maxSize = Math.floor(TEST_CACHE_SIZE / 2); - const cache = new Cache({ maxSize }); - - for (let i = 0; i < maxSize; i += 1) { - await cache.set(String(i), i); - expect(cache.size()).toEqual(i + 1); - } - - for (let i = maxSize; i < TEST_CACHE_SIZE; i += 1) { - await cache.set(String(i), i); - expect(cache.size()).toEqual(maxSize); - } - }); }); describe('delete', () => { diff --git a/test/types/dats/dat.test.ts b/test/types/dats/dat.test.ts index 821798b72..45080344e 100644 --- a/test/types/dats/dat.test.ts +++ b/test/types/dats/dat.test.ts @@ -1,5 +1,4 @@ import Game from '../../../src/types/dats/game.js'; -import ClrMamePro from '../../../src/types/dats/logiqx/clrMamePro.js'; import Header from '../../../src/types/dats/logiqx/header.js'; import LogiqxDAT from '../../../src/types/dats/logiqx/logiqxDat.js'; import Machine from '../../../src/types/dats/mame/machine.js'; @@ -22,31 +21,3 @@ describe('getGames', () => { expect(new MameDAT([]).getGames()).toHaveLength(0); }); }); - -describe('isHeadered', () => { - it('should return false for clrmamepro header', () => { - expect(new LogiqxDAT(new Header({ - clrMamePro: new ClrMamePro({ header: 'header' }), - }), []).isHeadered()).toEqual(false); - }); - - test.each([ - 'Nintendo - Nintendo Entertainment System (Headered) (Parent-Clone)', - ])('should return true for headered names: %s', (name) => { - expect(new LogiqxDAT(new Header({ name }), []).isHeadered()).toEqual(true); - }); -}); - -describe('isHeaderless', () => { - it('should return true for clrmamepro header', () => { - 
expect(new LogiqxDAT(new Header({ - clrMamePro: new ClrMamePro({ header: 'header' }), - }), []).isHeaderless()).toEqual(true); - }); - - test.each([ - 'Nintendo - Nintendo Entertainment System (Headerless) (Parent-Clone)', - ])('should return true for headered names: %s', (name) => { - expect(new LogiqxDAT(new Header({ name }), []).isHeaderless()).toEqual(true); - }); -}); diff --git a/test/types/dats/disk.test.ts b/test/types/dats/disk.test.ts deleted file mode 100644 index cbf3959e3..000000000 --- a/test/types/dats/disk.test.ts +++ /dev/null @@ -1,5 +0,0 @@ -import Disk from '../../../src/types/dats/disk.js'; - -it('should instantiate', () => { - expect(new Disk()).toBeTruthy(); -}); diff --git a/test/types/files/archives/archive.test.ts b/test/types/files/archives/archive.test.ts index 0c38c3ed8..61f86ff6b 100644 --- a/test/types/files/archives/archive.test.ts +++ b/test/types/files/archives/archive.test.ts @@ -4,14 +4,20 @@ import Temp from '../../../../src/globals/temp.js'; import fsPoly from '../../../../src/polyfill/fsPoly.js'; import Archive from '../../../../src/types/files/archives/archive.js'; import ArchiveEntry from '../../../../src/types/files/archives/archiveEntry.js'; -import Gzip from '../../../../src/types/files/archives/gzip.js'; +import Chd from '../../../../src/types/files/archives/chd/chd.js'; +import Cso from '../../../../src/types/files/archives/maxcso/cso.js'; +import Dax from '../../../../src/types/files/archives/maxcso/dax.js'; +import Zso from '../../../../src/types/files/archives/maxcso/zso.js'; +import NkitIso from '../../../../src/types/files/archives/nkitIso.js'; import Rar from '../../../../src/types/files/archives/rar.js'; -import SevenZip from '../../../../src/types/files/archives/sevenZip.js'; +import Gzip from '../../../../src/types/files/archives/sevenZip/gzip.js'; +import SevenZip from '../../../../src/types/files/archives/sevenZip/sevenZip.js'; +import Z from '../../../../src/types/files/archives/sevenZip/z.js'; +import 
ZipSpanned from '../../../../src/types/files/archives/sevenZip/zipSpanned.js'; +import ZipX from '../../../../src/types/files/archives/sevenZip/zipX.js'; import Tar from '../../../../src/types/files/archives/tar.js'; -import Z from '../../../../src/types/files/archives/z.js'; import Zip from '../../../../src/types/files/archives/zip.js'; -import ZipSpanned from '../../../../src/types/files/archives/zipSpanned.js'; -import ZipX from '../../../../src/types/files/archives/zipX.js'; +import FileCache from '../../../../src/types/files/fileCache.js'; import FileFactory from '../../../../src/types/files/fileFactory.js'; describe('getArchiveEntries', () => { @@ -25,9 +31,15 @@ describe('getArchiveEntries', () => { ...Z.getExtensions(), ...ZipSpanned.getExtensions(), ...ZipX.getExtensions(), + // Compressed images + ...Cso.getExtensions(), + ...Dax.getExtensions(), + ...Zso.getExtensions(), + ...Chd.getExtensions(), + ...NkitIso.getExtensions(), ])])('should throw when the file doesn\'t exist: %s', async (extension) => { const tempFile = (await fsPoly.mktemp(path.join(Temp.getTempDir(), 'file'))) + extension; - await expect(FileFactory.filesFrom(tempFile)).rejects.toThrow(); + await expect(new FileFactory(new FileCache()).filesFrom(tempFile)).rejects.toThrow(); }); test.each([ @@ -55,8 +67,10 @@ describe('getArchiveEntries', () => { ['./test/fixtures/roms/rar/unknown.rar', 'unknown.rom', '377a7727'], ['./test/fixtures/roms/tar/unknown.tar.gz', 'unknown.rom', '377a7727'], ['./test/fixtures/roms/zip/unknown.zip', 'unknown.rom', '377a7727'], + // other + ['./test/fixtures/roms/nkit/5bc2ce5b.nkit.iso', '5bc2ce5b.iso', '5bc2ce5b'], ])('should enumerate the single file archive: %s', async (filePath, expectedEntryPath, expectedCrc) => { - const entries = await FileFactory.filesFrom(filePath); + const entries = await new FileFactory(new FileCache()).filesFrom(filePath); expect(entries).toHaveLength(1); const entry = entries[0]; @@ -70,7 +84,7 @@ describe('getArchiveEntries', () => 
{ ['./test/fixtures/roms/tar/onetwothree.tar.gz', [['1/one.rom', 'f817a89f'], ['2/two.rom', '96170874'], ['3/three.rom', 'ff46c5d8']]], ['./test/fixtures/roms/zip/onetwothree.zip', [['1/one.rom', 'f817a89f'], ['2/two.rom', '96170874'], ['3/three.rom', 'ff46c5d8']]], ])('should enumerate the multi file archive: %s', async (filePath, expectedEntries) => { - const entries = await FileFactory.filesFrom(filePath); + const entries = await new FileFactory(new FileCache()).filesFrom(filePath); expect(entries).toHaveLength(expectedEntries.length); for (const [idx, entry] of entries.entries()) { diff --git a/test/types/files/archives/archiveEntry.test.ts b/test/types/files/archives/archiveEntry.test.ts index 248d8b7cd..cb3d24f5c 100644 --- a/test/types/files/archives/archiveEntry.test.ts +++ b/test/types/files/archives/archiveEntry.test.ts @@ -7,9 +7,10 @@ import bufferPoly from '../../../../src/polyfill/bufferPoly.js'; import fsPoly from '../../../../src/polyfill/fsPoly.js'; import Archive from '../../../../src/types/files/archives/archive.js'; import ArchiveEntry from '../../../../src/types/files/archives/archiveEntry.js'; -import SevenZip from '../../../../src/types/files/archives/sevenZip.js'; +import SevenZip from '../../../../src/types/files/archives/sevenZip/sevenZip.js'; import Zip from '../../../../src/types/files/archives/zip.js'; import File from '../../../../src/types/files/file.js'; +import FileCache from '../../../../src/types/files/fileCache.js'; import { ChecksumBitmask } from '../../../../src/types/files/fileChecksums.js'; import FileFactory from '../../../../src/types/files/fileFactory.js'; import ROMHeader from '../../../../src/types/files/romHeader.js'; @@ -30,24 +31,29 @@ describe('getEntryPath', () => { describe('getSize', () => { describe.each([ + // fizzbuzz ['./test/fixtures/roms/7z/fizzbuzz.7z', 9], ['./test/fixtures/roms/gz/fizzbuzz.gz', 9], ['./test/fixtures/roms/rar/fizzbuzz.rar', 9], ['./test/fixtures/roms/tar/fizzbuzz.tar.gz', 9], 
['./test/fixtures/roms/zip/fizzbuzz.zip', 9], + // foobar ['./test/fixtures/roms/7z/foobar.7z', 7], ['./test/fixtures/roms/gz/foobar.gz', 7], ['./test/fixtures/roms/rar/foobar.rar', 7], ['./test/fixtures/roms/tar/foobar.tar.gz', 7], ['./test/fixtures/roms/zip/foobar.zip', 7], + // loremipsum ['./test/fixtures/roms/7z/loremipsum.7z', 11], ['./test/fixtures/roms/gz/loremipsum.gz', 11], ['./test/fixtures/roms/rar/loremipsum.rar', 11], ['./test/fixtures/roms/tar/loremipsum.tar.gz', 11], ['./test/fixtures/roms/zip/loremipsum.zip', 11], + // other + ['./test/fixtures/roms/nkit/5bc2ce5b.nkit.iso', 1_459_978_240], ])('%s', (filePath, expectedSize) => { it('should get the file\'s size', async () => { - const archiveEntries = await FileFactory.filesFrom(filePath); + const archiveEntries = await new FileFactory(new FileCache()).filesFrom(filePath); expect(archiveEntries).toHaveLength(1); const archiveEntry = archiveEntries[0]; @@ -64,7 +70,7 @@ describe('getSize', () => { const tempLink = path.join(tempDir, `link_${path.basename(filePath)}`); await fsPoly.hardlink(path.resolve(tempFile), tempLink); - const archiveEntries = await FileFactory.filesFrom(tempLink); + const archiveEntries = await new FileFactory(new FileCache()).filesFrom(tempLink); expect(archiveEntries).toHaveLength(1); const archiveEntry = archiveEntries[0]; @@ -80,7 +86,7 @@ describe('getSize', () => { const tempLink = path.join(tempDir, path.basename(filePath)); await fsPoly.symlink(path.resolve(filePath), tempLink); - const archiveEntries = await FileFactory.filesFrom(tempLink); + const archiveEntries = await new FileFactory(new FileCache()).filesFrom(tempLink); expect(archiveEntries).toHaveLength(1); const archiveEntry = archiveEntries[0]; @@ -96,7 +102,7 @@ describe('getSize', () => { const tempLink = path.join(tempDir, path.basename(filePath)); await fsPoly.symlink(await fsPoly.symlinkRelativePath(filePath, tempLink), tempLink); - const archiveEntries = await FileFactory.filesFrom(tempLink); + const 
archiveEntries = await new FileFactory(new FileCache()).filesFrom(tempLink); expect(archiveEntries).toHaveLength(1); const archiveEntry = archiveEntries[0]; @@ -110,26 +116,32 @@ describe('getSize', () => { describe('getCrc32', () => { test.each([ + // fizzbuzz ['./test/fixtures/roms/7z/fizzbuzz.7z', '370517b5'], ['./test/fixtures/roms/gz/fizzbuzz.gz', '370517b5'], ['./test/fixtures/roms/rar/fizzbuzz.rar', '370517b5'], ['./test/fixtures/roms/tar/fizzbuzz.tar.gz', '370517b5'], ['./test/fixtures/roms/zip/fizzbuzz.zip', '370517b5'], + // foobar ['./test/fixtures/roms/7z/foobar.7z', 'b22c9747'], ['./test/fixtures/roms/gz/foobar.gz', 'b22c9747'], ['./test/fixtures/roms/rar/foobar.rar', 'b22c9747'], ['./test/fixtures/roms/tar/foobar.tar.gz', 'b22c9747'], ['./test/fixtures/roms/zip/foobar.zip', 'b22c9747'], + // loremipsum ['./test/fixtures/roms/7z/loremipsum.7z', '70856527'], ['./test/fixtures/roms/gz/loremipsum.gz', '70856527'], ['./test/fixtures/roms/rar/loremipsum.rar', '70856527'], ['./test/fixtures/roms/tar/loremipsum.tar.gz', '70856527'], ['./test/fixtures/roms/zip/loremipsum.zip', '70856527'], + // headered ['./test/fixtures/roms/headered/diagnostic_test_cartridge.a78.7z', 'f6cc9b1c'], ['./test/fixtures/roms/headered/fds_joypad_test.fds.zip', '1e58456d'], ['./test/fixtures/roms/headered/LCDTestROM.lnx.rar', '2d251538'], + // other + ['./test/fixtures/roms/nkit/5bc2ce5b.nkit.iso', '5bc2ce5b'], ])('should hash the full archive entry: %s', async (filePath, expectedCrc) => { - const archiveEntries = await FileFactory.filesFrom(filePath); + const archiveEntries = await new FileFactory(new FileCache()).filesFrom(filePath); expect(archiveEntries).toHaveLength(1); const archiveEntry = archiveEntries[0]; @@ -146,26 +158,32 @@ describe('getCrc32', () => { describe('getCrc32WithoutHeader', () => { test.each([ + // fizzbuzz ['./test/fixtures/roms/7z/fizzbuzz.7z', '370517b5'], ['./test/fixtures/roms/gz/fizzbuzz.gz', '370517b5'], ['./test/fixtures/roms/rar/fizzbuzz.rar', 
'370517b5'], ['./test/fixtures/roms/tar/fizzbuzz.tar.gz', '370517b5'], ['./test/fixtures/roms/zip/fizzbuzz.zip', '370517b5'], + // foobar ['./test/fixtures/roms/7z/foobar.7z', 'b22c9747'], ['./test/fixtures/roms/gz/foobar.gz', 'b22c9747'], ['./test/fixtures/roms/rar/foobar.rar', 'b22c9747'], ['./test/fixtures/roms/tar/foobar.tar.gz', 'b22c9747'], ['./test/fixtures/roms/zip/foobar.zip', 'b22c9747'], + // loremipsum ['./test/fixtures/roms/7z/loremipsum.7z', '70856527'], ['./test/fixtures/roms/gz/loremipsum.gz', '70856527'], ['./test/fixtures/roms/rar/loremipsum.rar', '70856527'], ['./test/fixtures/roms/tar/loremipsum.tar.gz', '70856527'], ['./test/fixtures/roms/zip/loremipsum.zip', '70856527'], + // headered ['./test/fixtures/roms/headered/diagnostic_test_cartridge.a78.7z', 'f6cc9b1c'], ['./test/fixtures/roms/headered/fds_joypad_test.fds.zip', '1e58456d'], ['./test/fixtures/roms/headered/LCDTestROM.lnx.rar', '2d251538'], + // other + ['./test/fixtures/roms/nkit/5bc2ce5b.nkit.iso', '5bc2ce5b'], ])('should hash the full archive entry when no header given: %s', async (filePath, expectedCrc) => { - const archiveEntries = await FileFactory.filesFrom(filePath); + const archiveEntries = await new FileFactory(new FileCache()).filesFrom(filePath); expect(archiveEntries).toHaveLength(1); const archiveEntry = archiveEntries[0]; @@ -184,7 +202,7 @@ describe('getCrc32WithoutHeader', () => { ['./test/fixtures/roms/headered/fds_joypad_test.fds.zip', '3ecbac61'], ['./test/fixtures/roms/headered/LCDTestROM.lnx.rar', '42583855'], ])('should hash the archive entry without the header when header is given and present in file: %s', async (filePath, expectedCrc) => { - const archiveEntries = await FileFactory.filesFrom(filePath); + const archiveEntries = await new FileFactory(new FileCache()).filesFrom(filePath); expect(archiveEntries).toHaveLength(1); const archiveEntry = await archiveEntries[0].withFileHeader( ROMHeader.headerFromFilename(archiveEntries[0].getExtractedFilePath()) as 
ROMHeader, @@ -203,26 +221,33 @@ describe('getCrc32WithoutHeader', () => { describe('getMd5', () => { test.each([ + // fizzbuzz ['./test/fixtures/roms/7z/fizzbuzz.7z', 'cbe8410861130a91609295349918c2c2'], ['./test/fixtures/roms/gz/fizzbuzz.gz', 'cbe8410861130a91609295349918c2c2'], ['./test/fixtures/roms/rar/fizzbuzz.rar', 'cbe8410861130a91609295349918c2c2'], ['./test/fixtures/roms/tar/fizzbuzz.tar.gz', 'cbe8410861130a91609295349918c2c2'], ['./test/fixtures/roms/zip/fizzbuzz.zip', 'cbe8410861130a91609295349918c2c2'], + // foobar ['./test/fixtures/roms/7z/foobar.7z', '14758f1afd44c09b7992073ccf00b43d'], ['./test/fixtures/roms/gz/foobar.gz', '14758f1afd44c09b7992073ccf00b43d'], ['./test/fixtures/roms/rar/foobar.rar', '14758f1afd44c09b7992073ccf00b43d'], ['./test/fixtures/roms/tar/foobar.tar.gz', '14758f1afd44c09b7992073ccf00b43d'], ['./test/fixtures/roms/zip/foobar.zip', '14758f1afd44c09b7992073ccf00b43d'], + // loremipsum ['./test/fixtures/roms/7z/loremipsum.7z', 'fffcb698d88fbc9425a636ba7e4712a3'], ['./test/fixtures/roms/gz/loremipsum.gz', 'fffcb698d88fbc9425a636ba7e4712a3'], ['./test/fixtures/roms/rar/loremipsum.rar', 'fffcb698d88fbc9425a636ba7e4712a3'], ['./test/fixtures/roms/tar/loremipsum.tar.gz', 'fffcb698d88fbc9425a636ba7e4712a3'], ['./test/fixtures/roms/zip/loremipsum.zip', 'fffcb698d88fbc9425a636ba7e4712a3'], + // headered ['./test/fixtures/roms/headered/diagnostic_test_cartridge.a78.7z', '8df5c10517da1c001072fd5526e4a683'], ['./test/fixtures/roms/headered/fds_joypad_test.fds.zip', '74362ca7d47e67e2d3e62f6283ecf879'], ['./test/fixtures/roms/headered/LCDTestROM.lnx.rar', '1ea102719a2fc3b767df0d2d367a8371'], + // other + ['./test/fixtures/roms/nkit/5bc2ce5b.nkit.iso', undefined], ])('should hash the full archive entry: %s', async (filePath, expectedMd5) => { - const archiveEntries = await FileFactory.filesFrom(filePath, ChecksumBitmask.MD5); + const archiveEntries = await new FileFactory(new FileCache()) + .filesFrom(filePath, ChecksumBitmask.MD5); 
expect(archiveEntries).toHaveLength(1); const archiveEntry = archiveEntries[0]; @@ -238,26 +263,33 @@ describe('getMd5', () => { describe('getMd5WithoutHeader', () => { test.each([ + // fizzbuzz ['./test/fixtures/roms/7z/fizzbuzz.7z', 'cbe8410861130a91609295349918c2c2'], ['./test/fixtures/roms/gz/fizzbuzz.gz', 'cbe8410861130a91609295349918c2c2'], ['./test/fixtures/roms/rar/fizzbuzz.rar', 'cbe8410861130a91609295349918c2c2'], ['./test/fixtures/roms/tar/fizzbuzz.tar.gz', 'cbe8410861130a91609295349918c2c2'], ['./test/fixtures/roms/zip/fizzbuzz.zip', 'cbe8410861130a91609295349918c2c2'], + // foobar ['./test/fixtures/roms/7z/foobar.7z', '14758f1afd44c09b7992073ccf00b43d'], ['./test/fixtures/roms/gz/foobar.gz', '14758f1afd44c09b7992073ccf00b43d'], ['./test/fixtures/roms/rar/foobar.rar', '14758f1afd44c09b7992073ccf00b43d'], ['./test/fixtures/roms/tar/foobar.tar.gz', '14758f1afd44c09b7992073ccf00b43d'], ['./test/fixtures/roms/zip/foobar.zip', '14758f1afd44c09b7992073ccf00b43d'], + // loremipsum ['./test/fixtures/roms/7z/loremipsum.7z', 'fffcb698d88fbc9425a636ba7e4712a3'], ['./test/fixtures/roms/gz/loremipsum.gz', 'fffcb698d88fbc9425a636ba7e4712a3'], ['./test/fixtures/roms/rar/loremipsum.rar', 'fffcb698d88fbc9425a636ba7e4712a3'], ['./test/fixtures/roms/tar/loremipsum.tar.gz', 'fffcb698d88fbc9425a636ba7e4712a3'], ['./test/fixtures/roms/zip/loremipsum.zip', 'fffcb698d88fbc9425a636ba7e4712a3'], + // headered ['./test/fixtures/roms/headered/diagnostic_test_cartridge.a78.7z', '8df5c10517da1c001072fd5526e4a683'], ['./test/fixtures/roms/headered/fds_joypad_test.fds.zip', '74362ca7d47e67e2d3e62f6283ecf879'], ['./test/fixtures/roms/headered/LCDTestROM.lnx.rar', '1ea102719a2fc3b767df0d2d367a8371'], + // other + ['./test/fixtures/roms/nkit/5bc2ce5b.nkit.iso', undefined], ])('should hash the full archive entry when no header given: %s', async (filePath, expectedMd5) => { - const archiveEntries = await FileFactory.filesFrom(filePath, ChecksumBitmask.MD5); + const archiveEntries = await 
new FileFactory(new FileCache()) + .filesFrom(filePath, ChecksumBitmask.MD5); expect(archiveEntries).toHaveLength(1); const archiveEntry = archiveEntries[0]; @@ -275,7 +307,8 @@ describe('getMd5WithoutHeader', () => { ['./test/fixtures/roms/headered/fds_joypad_test.fds.zip', '26df56a7e5b096577338bcc4c334ec7d'], ['./test/fixtures/roms/headered/LCDTestROM.lnx.rar', '294a07b07ce67a9b492e4b6e77d6d2f7'], ])('should hash the archive entry without the header when header is given and present in file: %s', async (filePath, expectedMd5) => { - const archiveEntries = await FileFactory.filesFrom(filePath, ChecksumBitmask.MD5); + const archiveEntries = await new FileFactory(new FileCache()) + .filesFrom(filePath, ChecksumBitmask.MD5); expect(archiveEntries).toHaveLength(1); const archiveEntry = await archiveEntries[0].withFileHeader( ROMHeader.headerFromFilename(archiveEntries[0].getExtractedFilePath()) as ROMHeader, @@ -294,26 +327,33 @@ describe('getMd5WithoutHeader', () => { describe('getSha1', () => { test.each([ + // fizzbuzz ['./test/fixtures/roms/7z/fizzbuzz.7z', '5a316d9f0e06964d94cdd62a933803d7147ddadb'], ['./test/fixtures/roms/gz/fizzbuzz.gz', '5a316d9f0e06964d94cdd62a933803d7147ddadb'], ['./test/fixtures/roms/rar/fizzbuzz.rar', '5a316d9f0e06964d94cdd62a933803d7147ddadb'], ['./test/fixtures/roms/tar/fizzbuzz.tar.gz', '5a316d9f0e06964d94cdd62a933803d7147ddadb'], ['./test/fixtures/roms/zip/fizzbuzz.zip', '5a316d9f0e06964d94cdd62a933803d7147ddadb'], + // foobar ['./test/fixtures/roms/7z/foobar.7z', '988881adc9fc3655077dc2d4d757d480b5ea0e11'], ['./test/fixtures/roms/gz/foobar.gz', '988881adc9fc3655077dc2d4d757d480b5ea0e11'], ['./test/fixtures/roms/rar/foobar.rar', '988881adc9fc3655077dc2d4d757d480b5ea0e11'], ['./test/fixtures/roms/tar/foobar.tar.gz', '988881adc9fc3655077dc2d4d757d480b5ea0e11'], ['./test/fixtures/roms/zip/foobar.zip', '988881adc9fc3655077dc2d4d757d480b5ea0e11'], + // loremipsum ['./test/fixtures/roms/7z/loremipsum.7z', 
'1d913738eb363a4056c19e158aa81189a1eb7a55'], ['./test/fixtures/roms/gz/loremipsum.gz', '1d913738eb363a4056c19e158aa81189a1eb7a55'], ['./test/fixtures/roms/rar/loremipsum.rar', '1d913738eb363a4056c19e158aa81189a1eb7a55'], ['./test/fixtures/roms/tar/loremipsum.tar.gz', '1d913738eb363a4056c19e158aa81189a1eb7a55'], ['./test/fixtures/roms/zip/loremipsum.zip', '1d913738eb363a4056c19e158aa81189a1eb7a55'], + // headered ['./test/fixtures/roms/headered/diagnostic_test_cartridge.a78.7z', 'f1da35db85f99db8803ae088499cb9dc148fe0c6'], ['./test/fixtures/roms/headered/fds_joypad_test.fds.zip', 'f7b31cc2b6ef841cc51df1711462c07b0994db98'], ['./test/fixtures/roms/headered/LCDTestROM.lnx.rar', '3882fcc1c94579a47a213224c572c006d62867f0'], + // other + ['./test/fixtures/roms/nkit/5bc2ce5b.nkit.iso', undefined], ])('should hash the full archive entry: %s', async (filePath, expectedSha1) => { - const archiveEntries = await FileFactory.filesFrom(filePath, ChecksumBitmask.SHA1); + const archiveEntries = await new FileFactory(new FileCache()) + .filesFrom(filePath, ChecksumBitmask.SHA1); expect(archiveEntries).toHaveLength(1); const archiveEntry = archiveEntries[0]; @@ -329,26 +369,33 @@ describe('getSha1', () => { describe('getSha1WithoutHeader', () => { test.each([ + // fizzbuzz ['./test/fixtures/roms/7z/fizzbuzz.7z', '5a316d9f0e06964d94cdd62a933803d7147ddadb'], ['./test/fixtures/roms/gz/fizzbuzz.gz', '5a316d9f0e06964d94cdd62a933803d7147ddadb'], ['./test/fixtures/roms/rar/fizzbuzz.rar', '5a316d9f0e06964d94cdd62a933803d7147ddadb'], ['./test/fixtures/roms/tar/fizzbuzz.tar.gz', '5a316d9f0e06964d94cdd62a933803d7147ddadb'], ['./test/fixtures/roms/zip/fizzbuzz.zip', '5a316d9f0e06964d94cdd62a933803d7147ddadb'], + // foobar ['./test/fixtures/roms/7z/foobar.7z', '988881adc9fc3655077dc2d4d757d480b5ea0e11'], ['./test/fixtures/roms/gz/foobar.gz', '988881adc9fc3655077dc2d4d757d480b5ea0e11'], ['./test/fixtures/roms/rar/foobar.rar', '988881adc9fc3655077dc2d4d757d480b5ea0e11'], 
['./test/fixtures/roms/tar/foobar.tar.gz', '988881adc9fc3655077dc2d4d757d480b5ea0e11'], ['./test/fixtures/roms/zip/foobar.zip', '988881adc9fc3655077dc2d4d757d480b5ea0e11'], + // loremipsum ['./test/fixtures/roms/7z/loremipsum.7z', '1d913738eb363a4056c19e158aa81189a1eb7a55'], ['./test/fixtures/roms/gz/loremipsum.gz', '1d913738eb363a4056c19e158aa81189a1eb7a55'], ['./test/fixtures/roms/rar/loremipsum.rar', '1d913738eb363a4056c19e158aa81189a1eb7a55'], ['./test/fixtures/roms/tar/loremipsum.tar.gz', '1d913738eb363a4056c19e158aa81189a1eb7a55'], ['./test/fixtures/roms/zip/loremipsum.zip', '1d913738eb363a4056c19e158aa81189a1eb7a55'], + // headered ['./test/fixtures/roms/headered/diagnostic_test_cartridge.a78.7z', 'f1da35db85f99db8803ae088499cb9dc148fe0c6'], ['./test/fixtures/roms/headered/fds_joypad_test.fds.zip', 'f7b31cc2b6ef841cc51df1711462c07b0994db98'], ['./test/fixtures/roms/headered/LCDTestROM.lnx.rar', '3882fcc1c94579a47a213224c572c006d62867f0'], + // other + ['./test/fixtures/roms/nkit/5bc2ce5b.nkit.iso', undefined], ])('should hash the full archive entry when no header given: %s', async (filePath, expectedSha1) => { - const archiveEntries = await FileFactory.filesFrom(filePath, ChecksumBitmask.SHA1); + const archiveEntries = await new FileFactory(new FileCache()) + .filesFrom(filePath, ChecksumBitmask.SHA1); expect(archiveEntries).toHaveLength(1); const archiveEntry = archiveEntries[0]; @@ -366,7 +413,8 @@ describe('getSha1WithoutHeader', () => { ['./test/fixtures/roms/headered/fds_joypad_test.fds.zip', '7b6bd1a69bbc5d8121c72dd1eedfb6752fe11787'], ['./test/fixtures/roms/headered/LCDTestROM.lnx.rar', 'e2901046126153b318a09cc1476eec8afff0b698'], ])('should hash the archive entry without the header when header is given and present in file: %s', async (filePath, expectedSha1) => { - const archiveEntries = await FileFactory.filesFrom(filePath, ChecksumBitmask.SHA1); + const archiveEntries = await new FileFactory(new FileCache()) + .filesFrom(filePath, 
ChecksumBitmask.SHA1); expect(archiveEntries).toHaveLength(1); const archiveEntry = await archiveEntries[0].withFileHeader( ROMHeader.headerFromFilename(archiveEntries[0].getExtractedFilePath()) as ROMHeader, @@ -385,26 +433,33 @@ describe('getSha1WithoutHeader', () => { describe('getSha256', () => { test.each([ + // fizzbuzz ['./test/fixtures/roms/7z/fizzbuzz.7z', '6e809804766eaa4dd42a2607b789f3e4e5d32fc321ba8dd3ef39ddc1ea2888e9'], ['./test/fixtures/roms/gz/fizzbuzz.gz', '6e809804766eaa4dd42a2607b789f3e4e5d32fc321ba8dd3ef39ddc1ea2888e9'], ['./test/fixtures/roms/rar/fizzbuzz.rar', '6e809804766eaa4dd42a2607b789f3e4e5d32fc321ba8dd3ef39ddc1ea2888e9'], ['./test/fixtures/roms/tar/fizzbuzz.tar.gz', '6e809804766eaa4dd42a2607b789f3e4e5d32fc321ba8dd3ef39ddc1ea2888e9'], ['./test/fixtures/roms/zip/fizzbuzz.zip', '6e809804766eaa4dd42a2607b789f3e4e5d32fc321ba8dd3ef39ddc1ea2888e9'], + // foobar ['./test/fixtures/roms/7z/foobar.7z', 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f'], ['./test/fixtures/roms/gz/foobar.gz', 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f'], ['./test/fixtures/roms/rar/foobar.rar', 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f'], ['./test/fixtures/roms/tar/foobar.tar.gz', 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f'], ['./test/fixtures/roms/zip/foobar.zip', 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f'], + // loremipsum ['./test/fixtures/roms/7z/loremipsum.7z', '9d0dc61fa60a12a9613cc32fa43fc85bea343ec3a25f27d10ed81a7f0b9ec278'], ['./test/fixtures/roms/gz/loremipsum.gz', '9d0dc61fa60a12a9613cc32fa43fc85bea343ec3a25f27d10ed81a7f0b9ec278'], ['./test/fixtures/roms/rar/loremipsum.rar', '9d0dc61fa60a12a9613cc32fa43fc85bea343ec3a25f27d10ed81a7f0b9ec278'], ['./test/fixtures/roms/tar/loremipsum.tar.gz', '9d0dc61fa60a12a9613cc32fa43fc85bea343ec3a25f27d10ed81a7f0b9ec278'], ['./test/fixtures/roms/zip/loremipsum.zip', 
'9d0dc61fa60a12a9613cc32fa43fc85bea343ec3a25f27d10ed81a7f0b9ec278'], + // headered ['./test/fixtures/roms/headered/diagnostic_test_cartridge.a78.7z', '38c6f2411d1f968a96fb85aa5202283dd53ac1bdfe27b233af00b7e0303afabf'], ['./test/fixtures/roms/headered/fds_joypad_test.fds.zip', 'be4bec168f7d9397454f3a0df761ed9359e82a1f98896756b6596023611fa6c1'], ['./test/fixtures/roms/headered/LCDTestROM.lnx.rar', 'c83ea05dc94aa8e158ecdbf84af91d971574712d73e0ea82f25dee7eaf88a9d4'], + // other + ['./test/fixtures/roms/nkit/5bc2ce5b.nkit.iso', undefined], ])('should hash the full archive entry: %s', async (filePath, expectedSha256) => { - const archiveEntries = await FileFactory.filesFrom(filePath, ChecksumBitmask.SHA256); + const archiveEntries = await new FileFactory(new FileCache()) + .filesFrom(filePath, ChecksumBitmask.SHA256); expect(archiveEntries).toHaveLength(1); const archiveEntry = archiveEntries[0]; @@ -420,26 +475,33 @@ describe('getSha256', () => { describe('getSha256WithoutHeader', () => { test.each([ + // fizzbuzz ['./test/fixtures/roms/7z/fizzbuzz.7z', '6e809804766eaa4dd42a2607b789f3e4e5d32fc321ba8dd3ef39ddc1ea2888e9'], ['./test/fixtures/roms/gz/fizzbuzz.gz', '6e809804766eaa4dd42a2607b789f3e4e5d32fc321ba8dd3ef39ddc1ea2888e9'], ['./test/fixtures/roms/rar/fizzbuzz.rar', '6e809804766eaa4dd42a2607b789f3e4e5d32fc321ba8dd3ef39ddc1ea2888e9'], ['./test/fixtures/roms/tar/fizzbuzz.tar.gz', '6e809804766eaa4dd42a2607b789f3e4e5d32fc321ba8dd3ef39ddc1ea2888e9'], ['./test/fixtures/roms/zip/fizzbuzz.zip', '6e809804766eaa4dd42a2607b789f3e4e5d32fc321ba8dd3ef39ddc1ea2888e9'], + // foobar ['./test/fixtures/roms/7z/foobar.7z', 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f'], ['./test/fixtures/roms/gz/foobar.gz', 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f'], ['./test/fixtures/roms/rar/foobar.rar', 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f'], ['./test/fixtures/roms/tar/foobar.tar.gz', 
'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f'], ['./test/fixtures/roms/zip/foobar.zip', 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f'], + // loremipsum ['./test/fixtures/roms/7z/loremipsum.7z', '9d0dc61fa60a12a9613cc32fa43fc85bea343ec3a25f27d10ed81a7f0b9ec278'], ['./test/fixtures/roms/gz/loremipsum.gz', '9d0dc61fa60a12a9613cc32fa43fc85bea343ec3a25f27d10ed81a7f0b9ec278'], ['./test/fixtures/roms/rar/loremipsum.rar', '9d0dc61fa60a12a9613cc32fa43fc85bea343ec3a25f27d10ed81a7f0b9ec278'], ['./test/fixtures/roms/tar/loremipsum.tar.gz', '9d0dc61fa60a12a9613cc32fa43fc85bea343ec3a25f27d10ed81a7f0b9ec278'], ['./test/fixtures/roms/zip/loremipsum.zip', '9d0dc61fa60a12a9613cc32fa43fc85bea343ec3a25f27d10ed81a7f0b9ec278'], + // headered ['./test/fixtures/roms/headered/diagnostic_test_cartridge.a78.7z', '38c6f2411d1f968a96fb85aa5202283dd53ac1bdfe27b233af00b7e0303afabf'], ['./test/fixtures/roms/headered/fds_joypad_test.fds.zip', 'be4bec168f7d9397454f3a0df761ed9359e82a1f98896756b6596023611fa6c1'], ['./test/fixtures/roms/headered/LCDTestROM.lnx.rar', 'c83ea05dc94aa8e158ecdbf84af91d971574712d73e0ea82f25dee7eaf88a9d4'], + // other + ['./test/fixtures/roms/nkit/5bc2ce5b.nkit.iso', undefined], ])('should hash the full archive entry when no header given: %s', async (filePath, expectedSha256) => { - const archiveEntries = await FileFactory.filesFrom(filePath, ChecksumBitmask.SHA256); + const archiveEntries = await new FileFactory(new FileCache()) + .filesFrom(filePath, ChecksumBitmask.SHA256); expect(archiveEntries).toHaveLength(1); const archiveEntry = archiveEntries[0]; @@ -457,7 +519,8 @@ describe('getSha256WithoutHeader', () => { ['./test/fixtures/roms/headered/fds_joypad_test.fds.zip', '29e56794d15ccaa79e48ec0c80004f8745cfb116cce43b99435ae8790e79c327'], ['./test/fixtures/roms/headered/LCDTestROM.lnx.rar', '65da30d4d2b210d9ab0b634deb3f6f8ee38af9a338b2f9bedd4379bacfb2b07d'], ])('should hash the archive entry without the header when header is given 
and present in file: %s', async (filePath, expectedSha256) => { - const archiveEntries = await FileFactory.filesFrom(filePath, ChecksumBitmask.SHA256); + const archiveEntries = await new FileFactory(new FileCache()) + .filesFrom(filePath, ChecksumBitmask.SHA256); expect(archiveEntries).toHaveLength(1); const archiveEntry = await archiveEntries[0].withFileHeader( ROMHeader.headerFromFilename(archiveEntries[0].getExtractedFilePath()) as ROMHeader, @@ -485,7 +548,7 @@ describe('extractEntryToFile', () => { './test/fixtures/roms/tar', './test/fixtures/roms/zip', ], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); const archives = archiveEntries .filter((entry): entry is ArchiveEntry => entry instanceof ArchiveEntry) .map((entry) => entry.getArchive()) @@ -509,7 +572,7 @@ describe('copyToTempFile', () => { './test/fixtures/roms/tar', './test/fixtures/roms/zip', ], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); expect(archiveEntries).toHaveLength(37); const temp = await fsPoly.mkdtemp(Temp.getTempDir()); @@ -534,7 +597,7 @@ describe('createReadStream', () => { './test/fixtures/roms/tar', './test/fixtures/roms/zip', ], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); expect(archiveEntries).toHaveLength(37); const temp = await fsPoly.mkdtemp(Temp.getTempDir()); diff --git a/test/types/files/archives/zip.test.ts b/test/types/files/archives/zip.test.ts index ed1dbea1a..c3d12900a 100644 --- a/test/types/files/archives/zip.test.ts +++ b/test/types/files/archives/zip.test.ts @@ -6,6 +6,7 @@ import fsPoly from '../../../../src/polyfill/fsPoly.js'; import ArchiveEntry from '../../../../src/types/files/archives/archiveEntry.js'; import Zip from '../../../../src/types/files/archives/zip.js'; import File from '../../../../src/types/files/file.js'; +import FileCache from 
'../../../../src/types/files/fileCache.js'; import FileFactory from '../../../../src/types/files/fileFactory.js'; import Options from '../../../../src/types/options.js'; import ProgressBarFake from '../../../console/progressBarFake.js'; @@ -13,7 +14,7 @@ import ProgressBarFake from '../../../console/progressBarFake.js'; async function findRoms(input: string): Promise { return new ROMScanner(new Options({ input: [input], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); } describe('createArchive', () => { @@ -37,7 +38,7 @@ describe('createArchive', () => { await fsPoly.copyFile(rom.getFilePath(), tempFilePath); // And a candidate is partially generated for that file - const tempFiles = await FileFactory.filesFrom(tempFilePath); + const tempFiles = await new FileFactory(new FileCache()).filesFrom(tempFilePath); const inputToOutput = await Promise.all(tempFiles.map(async (tempFile) => { const archiveEntry = await ArchiveEntry.entryOf({ ...tempFile, diff --git a/test/types/files/file.test.ts b/test/types/files/file.test.ts index 512f66278..32c90d16c 100644 --- a/test/types/files/file.test.ts +++ b/test/types/files/file.test.ts @@ -9,7 +9,9 @@ import ArchiveEntry from '../../../src/types/files/archives/archiveEntry.js'; import ArchiveFile from '../../../src/types/files/archives/archiveFile.js'; import Zip from '../../../src/types/files/archives/zip.js'; import File from '../../../src/types/files/file.js'; +import FileCache from '../../../src/types/files/fileCache.js'; import { ChecksumBitmask } from '../../../src/types/files/fileChecksums.js'; +import FileFactory from '../../../src/types/files/fileFactory.js'; import ROMHeader from '../../../src/types/files/romHeader.js'; import Options from '../../../src/types/options.js'; import IPSPatch from '../../../src/types/patches/ipsPatch.js'; @@ -49,7 +51,7 @@ describe('getSize', () => { [10_000], [1_000_000], ])('%s', (size) => { - it('should get the file\'s size: %s', 
async () => { + it('should get the file\'s size', async () => { const tempDir = await fsPoly.mkdtemp(Temp.getTempDir()); try { const tempFile = path.resolve(await fsPoly.mktemp(path.join(tempDir, 'file'))); @@ -343,7 +345,7 @@ describe('copyToTempFile', () => { it('should do nothing with no archive entry path', async () => { const raws = await new ROMScanner(new Options({ input: ['./test/fixtures/roms/raw'], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); expect(raws).toHaveLength(10); const temp = await fsPoly.mkdtemp(Temp.getTempDir()); @@ -361,7 +363,7 @@ describe('createReadStream', () => { it('should do nothing with no archive entry path', async () => { const raws = await new ROMScanner(new Options({ input: ['./test/fixtures/roms/raw/!(empty).*'], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); expect(raws).toHaveLength(9); const temp = await fsPoly.mkdtemp(Temp.getTempDir()); diff --git a/test/types/files/fileCache.test.ts b/test/types/files/fileCache.test.ts index 892551f4c..38acc4d09 100644 --- a/test/types/files/fileCache.test.ts +++ b/test/types/files/fileCache.test.ts @@ -9,13 +9,14 @@ import { ChecksumBitmask } from '../../../src/types/files/fileChecksums.js'; describe('loadFile', () => { it('should load after saving', async () => { const tempCache = await FsPoly.mktemp(path.join(Temp.getTempDir(), 'cache')); - await FileCache.loadFile(tempCache); + const fileCache = new FileCache(); + await fileCache.loadFile(tempCache); // Compute some values - await FileCache.getOrComputeFileChecksums(path.join('test', 'fixtures', 'roms', 'raw', 'fizzbuzz.nes'), ChecksumBitmask.CRC32); - await FileCache.getOrComputeArchiveChecksums(new Zip(path.join('test', 'fixtures', 'roms', 'zip', 'foobar.zip')), ChecksumBitmask.CRC32); + await fileCache.getOrComputeFileChecksums(path.join('test', 'fixtures', 'roms', 'raw', 'fizzbuzz.nes'), 
ChecksumBitmask.CRC32); + await fileCache.getOrComputeArchiveChecksums(new Zip(path.join('test', 'fixtures', 'roms', 'zip', 'foobar.zip')), ChecksumBitmask.CRC32); - await FileCache.save(); - await FileCache.loadFile(tempCache); + await fileCache.save(); + await fileCache.loadFile(tempCache); }); }); diff --git a/test/types/files/fileFactory.test.ts b/test/types/files/fileFactory.test.ts index 73a55b922..1cc14cc80 100644 --- a/test/types/files/fileFactory.test.ts +++ b/test/types/files/fileFactory.test.ts @@ -3,6 +3,7 @@ import path from 'node:path'; import Temp from '../../../src/globals/temp.js'; import FsPoly from '../../../src/polyfill/fsPoly.js'; import ArchiveEntry from '../../../src/types/files/archives/archiveEntry.js'; +import FileCache from '../../../src/types/files/fileCache.js'; import FileFactory from '../../../src/types/files/fileFactory.js'; describe('filesFrom', () => { @@ -19,6 +20,7 @@ describe('filesFrom', () => { ['test/fixtures/roms/gz/three.gz', 1], ['test/fixtures/roms/gz/two.gz', 1], ['test/fixtures/roms/gz/unknown.gz', 1], + ['test/fixtures/roms/nkit/5bc2ce5b.nkit.iso', 1], ['test/fixtures/roms/rar/fizzbuzz.rar', 1], ['test/fixtures/roms/rar/foobar.rar', 1], ['test/fixtures/roms/rar/loremipsum.rar', 1], @@ -37,7 +39,7 @@ describe('filesFrom', () => { ['test/fixtures/roms/zip/unknown.zip', 1], ])('%s', (filePath, expectedCount) => { it('should read the entries of archives with valid extensions: %s', async () => { - const archiveEntries = await FileFactory.filesFrom(filePath); + const archiveEntries = await new FileFactory(new FileCache()).filesFrom(filePath); expect(archiveEntries.every((archiveEntry) => archiveEntry instanceof ArchiveEntry)) .toEqual(true); expect(archiveEntries).toHaveLength(expectedCount); @@ -48,7 +50,7 @@ describe('filesFrom', () => { await FsPoly.mkdir(path.dirname(tempFile), { recursive: true }); await FsPoly.copyFile(filePath, tempFile); try { - const archiveEntries = await FileFactory.filesFrom(tempFile); + const 
archiveEntries = await new FileFactory(new FileCache()).filesFrom(tempFile); expect(archiveEntries.every((archiveEntry) => archiveEntry instanceof ArchiveEntry)) .toEqual(true); expect(archiveEntries).toHaveLength(expectedCount); diff --git a/test/types/files/romHeader.test.ts b/test/types/files/romHeader.test.ts index c5b7091e8..788d08c45 100644 --- a/test/types/files/romHeader.test.ts +++ b/test/types/files/romHeader.test.ts @@ -1,4 +1,6 @@ import ROMScanner from '../../../src/modules/romScanner.js'; +import FileCache from '../../../src/types/files/fileCache.js'; +import FileFactory from '../../../src/types/files/fileFactory.js'; import ROMHeader from '../../../src/types/files/romHeader.js'; import Options from '../../../src/types/options.js'; import ProgressBarFake from '../../console/progressBarFake.js'; @@ -33,7 +35,7 @@ describe('headerFromFileStream', () => { it('should get a file header for headered files', async () => { const headeredRoms = await new ROMScanner(new Options({ input: ['./test/fixtures/roms/headered'], - }), new ProgressBarFake()).scan(); + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); expect(headeredRoms).toHaveLength(6); for (const headeredRom of headeredRoms) { @@ -48,7 +50,11 @@ describe('headerFromFileStream', () => { it('should not get a file header for dummy files', async () => { const headeredRoms = await new ROMScanner(new Options({ input: ['./test/fixtures/roms/!(headered){,/}*'], - }), new ProgressBarFake()).scan(); + inputExclude: [ + './test/fixtures/roms/chd', + './test/fixtures/roms/nkit', + ], + }), new ProgressBarFake(), new FileFactory(new FileCache())).scan(); expect(headeredRoms.length).toBeGreaterThan(0); for (const headeredRom of headeredRoms) { diff --git a/test/types/options.test.ts b/test/types/options.test.ts index eb4e4e56c..15dbb9cb3 100644 --- a/test/types/options.test.ts +++ b/test/types/options.test.ts @@ -1,7 +1,5 @@ import path from 'node:path'; -import Header from 
'../../src/types/dats/logiqx/header.js'; -import LogiqxDAT from '../../src/types/dats/logiqx/logiqxDat.js'; import Options from '../../src/types/options.js'; describe('getOutputDirRoot', () => { @@ -22,51 +20,31 @@ describe('getOutputDirRoot', () => { }); describe('canRemoveHeader', () => { - test.each([ - 'Nintendo - Nintendo Entertainment System (Headered) (Parent-Clone)', - ])('should not remove header for headered DATs: %s', (datName) => { - const dat = new LogiqxDAT(new Header({ name: datName }), []); - const options = new Options({ removeHeaders: [''] }); - expect(options.canRemoveHeader(dat, '.smc')).toEqual(false); - }); - - test.each([ - 'Nintendo - Nintendo Entertainment System (Headerless) (Parent-Clone)', - ])('should remove header for headerless DATs: %s', (datName) => { - const dat = new LogiqxDAT(new Header({ name: datName }), []); - const options = new Options({ removeHeaders: [''] }); - expect(options.canRemoveHeader(dat, '.smc')).toEqual(true); - }); - test.each( ['.a78', '.lnx', '.nes', '.fds', '.smc'], )('should not remove header when option not provided: %s', (extension) => { - const dat = new LogiqxDAT(new Header(), []); const options = new Options(); - expect(options.canRemoveHeader(dat, extension)).toEqual(false); + expect(options.canRemoveHeader(extension)).toEqual(false); }); test.each( ['.a78', '.lnx', '.nes', '.fds', '.smc', '.someotherextension'], )('should remove header when no arg provided: %s', (extension) => { - const dat = new LogiqxDAT(new Header(), []); const options = new Options({ removeHeaders: [''] }); - expect(options.canRemoveHeader(dat, extension)).toEqual(true); + expect(options.canRemoveHeader(extension)).toEqual(true); }); test.each( ['.lnx', '.smc', '.someotherextension'], )('should remove header when extension matches: %s', (extension) => { - const dat = new LogiqxDAT(new Header(), []); const options = new Options({ removeHeaders: ['.LNX', '.smc', '.someotherextension'] }); - expect(options.canRemoveHeader(dat, 
extension)).toEqual(true); + expect(options.canRemoveHeader(extension)).toEqual(true); }); test.each( ['.a78', '.nes', '.fds'], )('should not remove header when extension does not match: %s', (extension) => { - const dat = new LogiqxDAT(new Header(), []); const options = new Options({ removeHeaders: ['.LNX', '.smc', '.someotherextension'] }); - expect(options.canRemoveHeader(dat, extension)).toEqual(false); + expect(options.canRemoveHeader(extension)).toEqual(false); }); });