Mirror of https://github.com/tauri-apps/tauri.git, synced 2026-02-06 11:22:04 +00:00

Merge branch 'dev' into fix/help-menu-mac

This commit is contained in: commit 53f573129f
@ -1,5 +0,0 @@
---
"@tauri-apps/api": minor:feat
---

Adds the `scrollBarStyle` option to the Webview and WebviewBuilder constructors.

@ -1,6 +0,0 @@
---
"tauri-cli": minor:feat
"tauri-utils": minor:feat
---

Adds the `scrollBarStyle` option to the window configuration.

@ -1,9 +0,0 @@
---
"tauri-runtime-wry": minor:feat
"tauri-runtime": minor:feat
"tauri": minor:feat
---

Adds the `scroll_bar_style` option to the Webview and WebviewWindow builders.
The possible values for this option are gated behind conditional compilation
flags, and will need to be applied using conditional compilation if customised.
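For orientation, a minimal sketch of how the new option might be applied from the Rust side, assuming the value type is an enum along the lines of `ScrollBarStyle` with a platform-specific variant such as `FluentOverlay`; the exact type path, variant names, and cfg gate are assumptions here and should be checked against the tauri/wry documentation (the JavaScript `scrollBarStyle` constructor option is the analogous knob on the `@tauri-apps/api` side):

```rust
// Illustrative sketch only — `scroll_bar_style`, `ScrollBarStyle`, and the cfg
// gate below are assumptions based on the changelog entries, not verified API.
use tauri::{WebviewUrl, WebviewWindowBuilder};

fn open_main_window(app: &tauri::AppHandle) -> tauri::Result<()> {
  let mut builder = WebviewWindowBuilder::new(app, "main", WebviewUrl::default());

  // The changelog notes that the possible values are gated behind conditional
  // compilation flags, so the call is wrapped in a cfg block.
  #[cfg(windows)]
  {
    builder = builder.scroll_bar_style(tauri::webview::ScrollBarStyle::FluentOverlay);
  }

  builder.build()?;
  Ok(())
}
```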
@ -1,6 +0,0 @@
---
'tauri-cli': 'minor:feat'
'tauri-bundler': 'minor:feat'
---

Add a `--no-sign` flag to the `tauri build` and `tauri bundle` commands to skip the code signing step, improving the developer experience for local testing and development without requiring code signing keys.

7 .changes/change-pr-14632.md Normal file
@ -0,0 +1,7 @@
---
"tauri-utils": patch:enhance
"tauri-build": patch:enhance
"tauri-cli": patch:enhance
---

Small code refactors for improved code readability. No user facing changes.
@ -27,12 +27,6 @@
"dryRunCommand": true,
"pipe": true
},
{
"command": "cargo generate-lockfile",
"dryRunCommand": true,
"runFromRoot": true,
"pipe": true
},
{
"command": "cargo audit ${ process.env.CARGO_AUDIT_OPTIONS || '' }",
"dryRunCommand": true,
@ -1,6 +0,0 @@
---
"tauri-utils": "minor:enhance"
"@tauri-apps/api": "minor:enhance"
---

Added a config to set a data_directory relative to the app-specific data dir in JavaScript and `tauri.conf.json`.

@ -1,6 +0,0 @@
---
"@tauri-apps/cli": patch:enhance
"tauri-cli": patch:enhance
---

Set a default log level filter when running `tauri add log`.

@ -1,6 +0,0 @@
---
"@tauri-apps/cli": minor:enhance
"tauri-cli": minor:enhance
---

Prompt to install the iOS platform if it isn't installed yet.

@ -1,6 +0,0 @@
---
"tauri": "patch:bug"
"tauri-macros": "patch:bug"
---

Fix the stack overflow when having too many commands in a single invoke handler in release build

5 .changes/window-config-focus-default-true.md Normal file
@ -0,0 +1,5 @@
---
tauri: patch:bug
---

`WindowConfig::focus` is set to `false` in `WindowConfig::default()`
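Since `WindowConfig::default()` no longer requests focus, windows created purely from defaults will not grab focus on creation. A minimal sketch of opting back in from Rust, assuming the builder's `focused` setter (present in earlier releases) is still available:

```rust
// Minimal sketch, assuming `WebviewWindowBuilder::focused` is available;
// verify against the current tauri documentation.
use tauri::{WebviewUrl, WebviewWindowBuilder};

fn open_focused_window(app: &tauri::AppHandle) -> tauri::Result<()> {
  // Request focus explicitly now that the config default is `false`.
  WebviewWindowBuilder::new(app, "settings", WebviewUrl::default())
    .focused(true)
    .build()?;
  Ok(())
}
```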
4 .github/CONTRIBUTING.md vendored
@ -33,11 +33,9 @@ Hi! We, the maintainers, are really excited that you are interested in contribut
- It's OK to have multiple small commits as you work on the PR - we will let GitHub automatically squash it before merging.

- If adding new feature:

- Provide convincing reason to add this feature. Ideally you should open a suggestion issue first and have it greenlighted before working on it.

- If fixing a bug:

- If you are resolving a special issue, add `(fix: #xxxx[,#xxx])` (#xxxx is the issue id) in your PR title for a better release log, e.g. `fix: update entities encoding/decoding (fix #3899)`.
- Provide detailed description of the bug in the PR, or link to an issue that does.

@ -98,7 +96,7 @@ You can use `cargo install --path . --debug` to speed up test builds.
You can build the Rust documentation locally running the following script:

```bash
$ RUSTDOCFLAGS="--cfg docsrs" cargo +nightly doc --all-features --open
$ cargo +nightly doc --all-features --open
```

### Developing the JS API
@ -78,7 +78,6 @@ jobs:
with:
node-version: 20
registry-url: 'https://registry.npmjs.org'
cache: 'pnpm'

- name: cargo login
run: cargo login ${{ secrets.ORG_CRATES_IO_TOKEN }}

@ -57,7 +57,7 @@ function checkChangeFiles(changeFiles) {
for (const [file, packages] of unknownTagsEntries) {
for (const { package, tag } of packages) {
console.error(
`Package \`${package}\` has an uknown change tag ${tag} in ${file} `
`Package \`${package}\` has an unknown change tag ${tag} in ${file} `
)
}
}

@ -29,7 +29,7 @@ const ignore = [
async function checkFile(file) {
if (
extensions.some((e) => file.endsWith(e))
&& !ignore.some((i) => file.includes(`/${i}/`) || path.basename(file) == i)
&& !ignore.some((i) => file.includes(`/${i}/`) || path.basename(file) === i)
) {
const fileStream = fs.createReadStream(file)
const rl = readline.createInterface({
151 Cargo.lock generated
@ -1055,9 +1055,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cargo-mobile2"
|
||||
version = "0.20.6"
|
||||
version = "0.21.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "35613119e2e16b293e56557a27da0d5bc42031f5edc0bf4f73a2b4d310d39c65"
|
||||
checksum = "dcea7efeaac9f0fd9f886f43a13dde186a1e2266fe6b53a42659e4e0689570de"
|
||||
dependencies = [
|
||||
"colored",
|
||||
"core-foundation 0.10.0",
|
||||
@ -1319,7 +1319,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -1641,6 +1641,12 @@ dependencies = [
|
||||
"syn 2.0.95",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ct-codecs"
|
||||
version = "1.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b10589d1a5e400d61f9f38f12f884cfd080ff345de8f17efda36fe0e4a02aa8"
|
||||
|
||||
[[package]]
|
||||
name = "ctor"
|
||||
version = "0.2.9"
|
||||
@ -2860,9 +2866,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"js-sys",
|
||||
"libc",
|
||||
"r-efi",
|
||||
"wasi 0.14.2+wasi-0.2.4",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -3459,9 +3467,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ico"
|
||||
version = "0.4.0"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cc50b891e4acf8fe0e71ef88ec43ad82ee07b3810ad09de10f1d01f072ed4b98"
|
||||
checksum = "3e795dff5605e0f04bff85ca41b51a96b83e80b281e96231bcaaf1ac35103371"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"png",
|
||||
@ -3960,9 +3968,9 @@ checksum = "f5d4a7da358eff58addd2877a45865158f0d78c911d43a5784ceb7bbf52833b0"
|
||||
|
||||
[[package]]
|
||||
name = "js-sys"
|
||||
version = "0.3.77"
|
||||
version = "0.3.83"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f"
|
||||
checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"wasm-bindgen",
|
||||
@ -4311,7 +4319,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"windows-targets 0.48.5",
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -4592,11 +4600,12 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
|
||||
|
||||
[[package]]
|
||||
name = "minisign"
|
||||
version = "0.7.3"
|
||||
version = "0.8.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b23ef13ff1d745b1e52397daaa247e333c607f3cff96d4df2b798dc252db974b"
|
||||
checksum = "e6bf96cef396a17a96f7600281aa4da9229860b7a082601b1f6db6eaa5f99ee5"
|
||||
dependencies = [
|
||||
"getrandom 0.2.15",
|
||||
"ct-codecs",
|
||||
"getrandom 0.3.3",
|
||||
"rpassword",
|
||||
"scrypt",
|
||||
]
|
||||
@ -4888,11 +4897,10 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "num-bigint-dig"
|
||||
version = "0.8.4"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151"
|
||||
checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"lazy_static",
|
||||
"libm",
|
||||
"num-integer",
|
||||
@ -6976,13 +6984,13 @@ checksum = "6c20b6793b5c2fa6553b250154b78d6d0db37e72700ae35fad9387a46f487c97"
|
||||
|
||||
[[package]]
|
||||
name = "rpassword"
|
||||
version = "7.3.1"
|
||||
version = "7.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "80472be3c897911d0137b2d2b9055faf6eeac5b14e324073d83bc17b191d7e3f"
|
||||
checksum = "66d4c8b64f049c6721ec8ccec37ddfc3d641c4a7fca57e8f2a89de509c73df39"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"rtoolbox",
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -8411,11 +8419,12 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tao"
|
||||
version = "0.34.1"
|
||||
version = "0.34.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b65dc99ae111a3255027d1eca24a3833bb3267d4556a6defddb455f3ca4f5b6c"
|
||||
checksum = "f3a753bdc39c07b192151523a3f77cd0394aa75413802c883a0f6f6a0e5ee2e7"
|
||||
dependencies = [
|
||||
"bitflags 2.7.0",
|
||||
"block2 0.6.0",
|
||||
"core-foundation 0.10.0",
|
||||
"core-graphics",
|
||||
"crossbeam-channel",
|
||||
@ -8484,7 +8493,7 @@ checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1"
|
||||
|
||||
[[package]]
|
||||
name = "tauri"
|
||||
version = "2.8.5"
|
||||
version = "2.9.5"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bytes",
|
||||
@ -8535,7 +8544,6 @@ dependencies = [
|
||||
"tracing",
|
||||
"tray-icon",
|
||||
"url",
|
||||
"urlpattern",
|
||||
"uuid",
|
||||
"webkit2gtk",
|
||||
"webview2-com",
|
||||
@ -8545,7 +8553,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-build"
|
||||
version = "2.4.1"
|
||||
version = "2.5.3"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"cargo_toml",
|
||||
@ -8567,7 +8575,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-bundler"
|
||||
version = "2.6.1"
|
||||
version = "2.7.5"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"ar",
|
||||
@ -8613,9 +8621,8 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-cli"
|
||||
version = "2.8.4"
|
||||
version = "2.9.6"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"ar",
|
||||
"axum",
|
||||
"base64 0.22.1",
|
||||
@ -8627,6 +8634,7 @@ dependencies = [
|
||||
"css-color",
|
||||
"ctrlc",
|
||||
"dialoguer",
|
||||
"dirs 6.0.0",
|
||||
"duct",
|
||||
"dunce",
|
||||
"elf",
|
||||
@ -8667,6 +8675,7 @@ dependencies = [
|
||||
"plist",
|
||||
"pretty_assertions",
|
||||
"rand 0.9.1",
|
||||
"rayon",
|
||||
"regex",
|
||||
"resvg",
|
||||
"semver",
|
||||
@ -8680,6 +8689,7 @@ dependencies = [
|
||||
"tauri-macos-sign",
|
||||
"tauri-utils",
|
||||
"tempfile",
|
||||
"thiserror 2.0.12",
|
||||
"tokio",
|
||||
"toml 0.9.4",
|
||||
"toml_edit 0.23.2",
|
||||
@ -8687,7 +8697,9 @@ dependencies = [
|
||||
"url",
|
||||
"uuid",
|
||||
"walkdir",
|
||||
"which",
|
||||
"windows-sys 0.60.2",
|
||||
"zip 4.0.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -8703,7 +8715,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-codegen"
|
||||
version = "2.4.0"
|
||||
version = "2.5.2"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"brotli",
|
||||
@ -8769,9 +8781,8 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-macos-sign"
|
||||
version = "2.2.0"
|
||||
version = "2.3.2"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"apple-codesign",
|
||||
"chrono",
|
||||
"dirs 6.0.0",
|
||||
@ -8784,12 +8795,13 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"thiserror 2.0.12",
|
||||
"x509-certificate 0.23.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tauri-macros"
|
||||
version = "2.4.0"
|
||||
version = "2.5.2"
|
||||
dependencies = [
|
||||
"heck 0.5.0",
|
||||
"proc-macro2",
|
||||
@ -8801,7 +8813,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-plugin"
|
||||
version = "2.4.0"
|
||||
version = "2.5.2"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"glob",
|
||||
@ -8849,7 +8861,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-runtime"
|
||||
version = "2.8.0"
|
||||
version = "2.9.2"
|
||||
dependencies = [
|
||||
"cookie",
|
||||
"dpi",
|
||||
@ -8872,7 +8884,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-runtime-wry"
|
||||
version = "2.8.1"
|
||||
version = "2.9.3"
|
||||
dependencies = [
|
||||
"gtk",
|
||||
"http 1.3.1",
|
||||
@ -8923,7 +8935,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-utils"
|
||||
version = "2.7.0"
|
||||
version = "2.8.1"
|
||||
dependencies = [
|
||||
"aes-gcm",
|
||||
"anyhow",
|
||||
@ -10050,35 +10062,22 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
version = "0.2.100"
|
||||
version = "0.2.106"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
|
||||
checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"once_cell",
|
||||
"rustversion",
|
||||
"wasm-bindgen-macro",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-backend"
|
||||
version = "0.2.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"log",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.95",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-futures"
|
||||
version = "0.4.50"
|
||||
version = "0.4.56"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61"
|
||||
checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"js-sys",
|
||||
@ -10089,9 +10088,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro"
|
||||
version = "0.2.100"
|
||||
version = "0.2.106"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407"
|
||||
checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"wasm-bindgen-macro-support",
|
||||
@ -10099,22 +10098,22 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro-support"
|
||||
version = "0.2.100"
|
||||
version = "0.2.106"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
|
||||
checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.95",
|
||||
"wasm-bindgen-backend",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-shared"
|
||||
version = "0.2.100"
|
||||
version = "0.2.106"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d"
|
||||
checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
@ -10134,9 +10133,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "web-sys"
|
||||
version = "0.3.77"
|
||||
version = "0.3.83"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2"
|
||||
checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac"
|
||||
dependencies = [
|
||||
"js-sys",
|
||||
"wasm-bindgen",
|
||||
@ -10311,7 +10310,7 @@ version = "0.1.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
|
||||
dependencies = [
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -10856,9 +10855,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "worker"
|
||||
version = "0.6.0"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7f6ac1566a3005b790b974f0621d77431e2a47e5f481276485f5ac0485775de2"
|
||||
checksum = "42c76c5889873a2c309365ad4503810c007d3c25fbb4e9fa9e4e23c4ceb3c7f2"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"axum",
|
||||
@ -10881,31 +10880,15 @@ dependencies = [
|
||||
"wasm-bindgen-futures",
|
||||
"wasm-streams",
|
||||
"web-sys",
|
||||
"worker-kv",
|
||||
"worker-macros",
|
||||
"worker-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "worker-kv"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b0d30eb90e8db0657414129624c0d12c6cb480574bc2ddd584822db196cb9a52"
|
||||
dependencies = [
|
||||
"js-sys",
|
||||
"serde",
|
||||
"serde-wasm-bindgen",
|
||||
"serde_json",
|
||||
"thiserror 2.0.12",
|
||||
"wasm-bindgen",
|
||||
"wasm-bindgen-futures",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "worker-macros"
|
||||
version = "0.6.0"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5ba7478759843ae3d56dc7ba2445e7a514a5d043eaa98cebac2789f7ab5221ee"
|
||||
checksum = "62c62584d037bad33789a6a5d605b3fccea1c52de9251d06f9d44054170dc612"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"proc-macro2",
|
||||
@ -10919,9 +10902,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "worker-sys"
|
||||
version = "0.6.0"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "eb4d7a3273dd584b9526aec77bbcf815c51d1a0e17407b1a390cf5a39b6d4fbd"
|
||||
checksum = "72ddd412fd62c6eeffc1dd85e6ae5960a33b534f44a733df75b6e7519972bc74"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"js-sys",
|
||||
@ -10943,9 +10926,9 @@ checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
|
||||
|
||||
[[package]]
|
||||
name = "wry"
|
||||
version = "0.53.2"
|
||||
version = "0.53.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e3b6763512fe4b51c80b3ce9b50939d682acb4de335dfabbdb20d7a2642199b7"
|
||||
checksum = "6d78ec082b80fa088569a970d043bb3050abaabf4454101d44514ee8d9a8c9f6"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"block2 0.6.0",
|
||||
|
||||
@ -71,3 +71,4 @@ opt-level = "s"
|
||||
schemars_derive = { git = 'https://github.com/tauri-apps/schemars.git', branch = 'feat/preserve-description-newlines' }
|
||||
tauri = { path = "./crates/tauri" }
|
||||
tauri-plugin = { path = "./crates/tauri-plugin" }
|
||||
tauri-utils = { path = "./crates/tauri-utils" }
|
||||
|
||||
@ -375,7 +375,7 @@ fn main() -> Result<()> {
|
||||
if let Some(filename) = bench_file.to_str() {
|
||||
utils::write_json(filename, &serde_json::to_value(&new_data)?)
|
||||
.context("failed to write benchmark results to file")?;
|
||||
println!("Results written to: {}", filename);
|
||||
println!("Results written to: {filename}");
|
||||
} else {
|
||||
eprintln!("Cannot write bench.json, path contains invalid UTF-8");
|
||||
}
|
||||
|
||||
@ -1,5 +1,35 @@
|
||||
# Changelog
|
||||
|
||||
## \[2.5.3]
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Upgraded to `tauri-utils@2.8.1`
|
||||
- Upgraded to `tauri-codegen@2.5.2`
|
||||
|
||||
## \[2.5.2]
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Upgraded to `tauri-codegen@2.5.1`
|
||||
|
||||
## \[2.5.1]
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- [`4b6b8690a`](https://www.github.com/tauri-apps/tauri/commit/4b6b8690ab886ebdf1307951cffbe03e31280baa) ([#14347](https://www.github.com/tauri-apps/tauri/pull/14347) by [@FabianLars](https://www.github.com/tauri-apps/tauri/../../FabianLars)) Fixed an issue that caused docs.rs builds to fail. No user facing changes.
|
||||
|
||||
## \[2.5.0]
|
||||
|
||||
### New Features
|
||||
|
||||
- [`3b4fac201`](https://www.github.com/tauri-apps/tauri/commit/3b4fac2017832d426dd07c5e24e26684eda57f7b) ([#14194](https://www.github.com/tauri-apps/tauri/pull/14194)) Add `tauri.conf.json > bundle > android > autoIncrementVersionCode` config option to automatically increment the Android version code.
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Upgraded to `tauri-utils@2.8.0`
|
||||
- Upgraded to `tauri-codegen@2.5.0`
|
||||
|
||||
## \[2.4.1]
|
||||
|
||||
### Enhancements
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "tauri-build"
|
||||
version = "2.4.1"
|
||||
version = "2.5.3"
|
||||
description = "build time code to pair with https://crates.io/crates/tauri"
|
||||
exclude = ["CHANGELOG.md", "/target"]
|
||||
readme = "README.md"
|
||||
@ -22,14 +22,12 @@ targets = [
|
||||
"x86_64-linux-android",
|
||||
"x86_64-apple-ios",
|
||||
]
|
||||
rustc-args = ["--cfg", "docsrs"]
|
||||
rustdoc-args = ["--cfg", "docsrs"]
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
quote = { version = "1", optional = true }
|
||||
tauri-codegen = { version = "2.4.0", path = "../tauri-codegen", optional = true }
|
||||
tauri-utils = { version = "2.7.0", path = "../tauri-utils", features = [
|
||||
tauri-codegen = { version = "2.5.2", path = "../tauri-codegen", optional = true }
|
||||
tauri-utils = { version = "2.8.1", path = "../tauri-utils", features = [
|
||||
"build",
|
||||
"resources",
|
||||
] }
|
||||
|
||||
@ -157,7 +157,7 @@ fn read_plugins_manifests() -> Result<BTreeMap<String, Manifest>> {
|
||||
Ok(manifests)
|
||||
}
|
||||
|
||||
struct InlinedPuginsAcl {
|
||||
struct InlinedPluginsAcl {
|
||||
manifests: BTreeMap<String, Manifest>,
|
||||
permission_files: BTreeMap<String, Vec<PermissionFile>>,
|
||||
}
|
||||
@ -165,7 +165,7 @@ struct InlinedPuginsAcl {
|
||||
fn inline_plugins(
|
||||
out_dir: &Path,
|
||||
inlined_plugins: HashMap<&'static str, InlinedPlugin>,
|
||||
) -> Result<InlinedPuginsAcl> {
|
||||
) -> Result<InlinedPluginsAcl> {
|
||||
let mut acl_manifests = BTreeMap::new();
|
||||
let mut permission_files_map = BTreeMap::new();
|
||||
|
||||
@ -250,7 +250,7 @@ permissions = [{default_permissions}]
|
||||
acl_manifests.insert(name.into(), manifest);
|
||||
}
|
||||
|
||||
Ok(InlinedPuginsAcl {
|
||||
Ok(InlinedPluginsAcl {
|
||||
manifests: acl_manifests,
|
||||
permission_files: permission_files_map,
|
||||
})
|
||||
|
||||
@ -120,6 +120,13 @@ impl CodegenContext {
|
||||
if info_plist_path.exists() {
|
||||
println!("cargo:rerun-if-changed={}", info_plist_path.display());
|
||||
}
|
||||
|
||||
if let Some(plist_path) = &config.bundle.macos.info_plist {
|
||||
let info_plist_path = config_parent.join(plist_path);
|
||||
if info_plist_path.exists() {
|
||||
println!("cargo:rerun-if-changed={}", info_plist_path.display());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let code = context_codegen(ContextData {
|
||||
|
||||
@ -165,21 +165,21 @@ fn copy_frameworks(dest_dir: &Path, frameworks: &[String]) -> Result<()> {
.with_context(|| format!("Failed to create frameworks output directory at {dest_dir:?}"))?;
for framework in frameworks.iter() {
if framework.ends_with(".framework") {
let src_path = PathBuf::from(framework);
let src_path = Path::new(framework);
let src_name = src_path
.file_name()
.expect("Couldn't get framework filename");
let dest_path = dest_dir.join(src_name);
copy_dir(&src_path, &dest_path)?;
copy_dir(src_path, &dest_path)?;
continue;
} else if framework.ends_with(".dylib") {
let src_path = PathBuf::from(framework);
let src_path = Path::new(framework);
if !src_path.exists() {
return Err(anyhow::anyhow!("Library not found: {}", framework));
}
let src_name = src_path.file_name().expect("Couldn't get library filename");
let dest_path = dest_dir.join(src_name);
copy_file(&src_path, &dest_path)?;
copy_file(src_path, &dest_path)?;
continue;
} else if framework.contains('/') {
return Err(anyhow::anyhow!(
@ -192,12 +192,8 @@ fn copy_frameworks(dest_dir: &Path, frameworks: &[String]) -> Result<()> {
continue;
}
}
if copy_framework_from(&PathBuf::from("/Library/Frameworks/"), framework, dest_dir)?
|| copy_framework_from(
&PathBuf::from("/Network/Library/Frameworks/"),
framework,
dest_dir,
)?
if copy_framework_from("/Library/Frameworks/".as_ref(), framework, dest_dir)?
|| copy_framework_from("/Network/Library/Frameworks/".as_ref(), framework, dest_dir)?
{
continue;
}
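The hunks above swap owned `PathBuf::from(...)` values for borrowed `Path::new(...)` and `.as_ref()` calls where no ownership is needed. A small standard-library-only illustration of the difference (independent of `copy_frameworks` itself):

```rust
use std::path::{Path, PathBuf};

fn main() {
  let framework = "MyKit.framework";

  // Allocates an owned buffer; only needed when the path must be stored or mutated.
  let owned: PathBuf = PathBuf::from(framework);

  // Borrows the existing string as a `&Path`; no allocation, which is all a
  // helper taking `&Path` (like `copy_dir`/`copy_file` above) requires.
  let borrowed: &Path = Path::new(framework);

  // String literals can likewise be coerced with `as_ref()`, as done for the
  // framework search directories in the second hunk.
  let system_dir: &Path = "/Library/Frameworks/".as_ref();

  assert_eq!(owned.as_path(), borrowed);
  assert_eq!(system_dir, Path::new("/Library/Frameworks/"));
}
```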
@ -263,7 +259,7 @@ impl WindowsAttributes {
}
}

/// Creates the default attriute set wihtou the default app manifest.
/// Creates the default attribute set without the default app manifest.
#[must_use]
pub fn new_without_app_manifest() -> Self {
Self {
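The corrected doc comment belongs to `WindowsAttributes::new_without_app_manifest`, which is consumed from an application's build script. A sketch of that usage, assuming the `windows_attributes` setter exposed by the published tauri-build API (verify the method name against the crate docs):

```rust
// build.rs — sketch; confirm method names against the tauri-build documentation.
fn main() {
  // Skip embedding the default Windows application manifest, e.g. when the
  // project ships its own manifest via a resource file.
  let windows = tauri_build::WindowsAttributes::new_without_app_manifest();
  let attributes = tauri_build::Attributes::new().windows_attributes(windows);
  tauri_build::try_build(attributes).expect("failed to run tauri-build");
}
```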
@ -499,7 +495,7 @@ pub fn try_build(attributes: Attributes) -> Result<()> {
println!("cargo:rustc-env=TAURI_ANDROID_PACKAGE_NAME_PREFIX={android_package_prefix}");

if let Some(project_dir) = env::var_os("TAURI_ANDROID_PROJECT_PATH").map(PathBuf::from) {
mobile::generate_gradle_files(project_dir, &config)?;
mobile::generate_gradle_files(project_dir)?;
}

cfg_alias("dev", is_dev());
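The `cfg_alias("dev", is_dev())` call above relies on Cargo's build-script protocol. A hedged re-implementation of the general idea (not the helper tauri-build actually uses) for readers unfamiliar with the pattern:

```rust
// Illustrative only: emit `cargo:rustc-cfg=<alias>` so that `#[cfg(<alias>)]`
// blocks compile in when the condition holds. The real `cfg_alias` in
// tauri-build may differ in details.
fn cfg_alias(alias: &str, enabled: bool) {
  if enabled {
    println!("cargo:rustc-cfg={alias}");
  }
}

fn main() {
  // Stand-in condition; tauri-build derives `is_dev()` from its own state.
  cfg_alias("dev", cfg!(debug_assertions));
}
```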
@ -2,18 +2,14 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use std::{fs::write, path::PathBuf};
|
||||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use semver::Version;
|
||||
use tauri_utils::{config::Config, write_if_changed};
|
||||
use tauri_utils::write_if_changed;
|
||||
|
||||
use crate::is_dev;
|
||||
|
||||
pub fn generate_gradle_files(project_dir: PathBuf, config: &Config) -> Result<()> {
|
||||
pub fn generate_gradle_files(project_dir: PathBuf) -> Result<()> {
|
||||
let gradle_settings_path = project_dir.join("tauri.settings.gradle");
|
||||
let app_build_gradle_path = project_dir.join("app").join("tauri.build.gradle.kts");
|
||||
let app_tauri_properties_path = project_dir.join("app").join("tauri.properties");
|
||||
|
||||
let mut gradle_settings =
|
||||
"// THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.\n".to_string();
|
||||
@ -21,7 +17,6 @@ pub fn generate_gradle_files(project_dir: PathBuf, config: &Config) -> Result<()
|
||||
val implementation by configurations
|
||||
dependencies {"
|
||||
.to_string();
|
||||
let mut app_tauri_properties = Vec::new();
|
||||
|
||||
for (env, value) in std::env::vars_os() {
|
||||
let env = env.to_string_lossy();
|
||||
@ -54,32 +49,6 @@ dependencies {"

app_build_gradle.push_str("\n}");

if let Some(version) = config.version.as_ref() {
app_tauri_properties.push(format!("tauri.android.versionName={version}"));
if let Some(version_code) = config.bundle.android.version_code.as_ref() {
app_tauri_properties.push(format!("tauri.android.versionCode={version_code}"));
} else if let Ok(version) = Version::parse(version) {
let mut version_code = version.major * 1000000 + version.minor * 1000 + version.patch;

if is_dev() {
version_code = version_code.clamp(1, 2100000000);
}

if version_code == 0 {
return Err(anyhow::anyhow!(
"You must change the `version` in `tauri.conf.json`. The default value `0.0.0` is not allowed for Android package and must be at least `0.0.1`."
));
} else if version_code > 2100000000 {
return Err(anyhow::anyhow!(
"Invalid version code {}. Version code must be between 1 and 2100000000. You must change the `version` in `tauri.conf.json`.",
version_code
));
}

app_tauri_properties.push(format!("tauri.android.versionCode={version_code}"));
}
}

// Overwrite only if changed to not trigger rebuilds
write_if_changed(&gradle_settings_path, gradle_settings)
.context("failed to write tauri.settings.gradle")?;
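The block removed above derives the Android `versionCode` from the semver app version as `major * 1_000_000 + minor * 1_000 + patch`, rejecting values outside Google Play's 1..=2_100_000_000 range. A worked example of that arithmetic, kept here for reference:

```rust
// Same formula as the removed block: versionCode = major*1_000_000 + minor*1_000 + patch.
fn android_version_code(major: u64, minor: u64, patch: u64) -> u64 {
  major * 1_000_000 + minor * 1_000 + patch
}

fn main() {
  // An app version of 2.9.5 maps to 2_009_005, well below the 2_100_000_000 ceiling.
  assert_eq!(android_version_code(2, 9, 5), 2_009_005);
  // 0.0.0 maps to 0, which the removed code rejected with an explicit error.
  assert_eq!(android_version_code(0, 0, 0), 0);
}
```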
@ -87,28 +56,8 @@ dependencies {"
|
||||
write_if_changed(&app_build_gradle_path, app_build_gradle)
|
||||
.context("failed to write tauri.build.gradle.kts")?;
|
||||
|
||||
if !app_tauri_properties.is_empty() {
|
||||
let app_tauri_properties_content = format!(
|
||||
"// THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.\n{}",
|
||||
app_tauri_properties.join("\n")
|
||||
);
|
||||
if std::fs::read_to_string(&app_tauri_properties_path)
|
||||
.map(|o| o != app_tauri_properties_content)
|
||||
.unwrap_or(true)
|
||||
{
|
||||
write(&app_tauri_properties_path, app_tauri_properties_content)
|
||||
.context("failed to write tauri.properties")?;
|
||||
}
|
||||
}
|
||||
|
||||
println!("cargo:rerun-if-changed={}", gradle_settings_path.display());
|
||||
println!("cargo:rerun-if-changed={}", app_build_gradle_path.display());
|
||||
if !app_tauri_properties.is_empty() {
|
||||
println!(
|
||||
"cargo:rerun-if-changed={}",
|
||||
app_tauri_properties_path.display()
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -1,5 +1,83 @@
|
||||
# Changelog
|
||||
|
||||
## \[2.7.5]
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [`4176f93ae`](https://www.github.com/tauri-apps/tauri/commit/4176f93ae43ef66714c4934feb3df19df3a3e28a) ([#14570](https://www.github.com/tauri-apps/tauri/pull/14570) by [@chfaft](https://www.github.com/tauri-apps/tauri/../../chfaft)) Consider extensions that are defined in the wxs template.
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- [`018b4db22`](https://www.github.com/tauri-apps/tauri/commit/018b4db22e167fa67b37b0933e192a0f3556d3e5) ([#14625](https://www.github.com/tauri-apps/tauri/pull/14625) by [@Legend-Master](https://www.github.com/tauri-apps/tauri/../../Legend-Master)) Skip signing for NSIS uninstaller when using `--no-sign` flag
|
||||
- [`91becd9e4`](https://www.github.com/tauri-apps/tauri/commit/91becd9e4fa2db089ddc6b21dadc06133e939e08) ([#14627](https://www.github.com/tauri-apps/tauri/pull/14627) by [@Legend-Master](https://www.github.com/tauri-apps/tauri/../../Legend-Master)) Fix NSIS plugins not being signed due to wrong path handlings
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Upgraded to `tauri-macos-sign@2.3.2`
|
||||
|
||||
## \[2.7.4]
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- [`1496145f8`](https://www.github.com/tauri-apps/tauri/commit/1496145f8222649efeff22b819a96208670bbea1) ([#14585](https://www.github.com/tauri-apps/tauri/pull/14585) by [@FabianLars](https://www.github.com/tauri-apps/tauri/../../FabianLars)) Fixed an issue that caused the AppImage bundler to fail with 404 errors for 32-bit builds.
|
||||
|
||||
### Performance Improvements
|
||||
|
||||
- [`ce98d87ce`](https://www.github.com/tauri-apps/tauri/commit/ce98d87ce0aaa907285852eb80691197424e03c3) ([#14474](https://www.github.com/tauri-apps/tauri/pull/14474) by [@Tunglies](https://www.github.com/tauri-apps/tauri/../../Tunglies)) refactor: remove needless collect. No user facing changes.
|
||||
- [`ee3cc4a91`](https://www.github.com/tauri-apps/tauri/commit/ee3cc4a91bf1315ecaefe90f423ffd55ef6c40db) ([#14475](https://www.github.com/tauri-apps/tauri/pull/14475) by [@Tunglies](https://www.github.com/tauri-apps/tauri/../../Tunglies)) perf: remove needless clones in various files for improved performance. No user facing changes.
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Upgraded to `tauri-macos-sign@2.3.1`
|
||||
- Upgraded to `tauri-utils@2.8.1`
|
||||
- [`b5ef603d8`](https://www.github.com/tauri-apps/tauri/commit/b5ef603d84bd8044625e50dcfdabb099b2e9fdd9) ([#14478](https://www.github.com/tauri-apps/tauri/pull/14478) by [@Legend-Master](https://www.github.com/tauri-apps/tauri/../../Legend-Master)) Updated NSIS from 3.8 to 3.11
|
||||
|
||||
## \[2.7.3]
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [`22edc65aa`](https://www.github.com/tauri-apps/tauri/commit/22edc65aad0b3e45515008e8e0866112da70c8a1) ([#14408](https://www.github.com/tauri-apps/tauri/pull/14408) by [@FabianLars](https://www.github.com/tauri-apps/tauri/../../FabianLars)) Set user-agent in bundler and cli http requests when fetching build tools.
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- [`9a1922636`](https://www.github.com/tauri-apps/tauri/commit/9a192263693d71123a9953e2a6ee60fad07500b4) ([#14410](https://www.github.com/tauri-apps/tauri/pull/14410) by [@Legend-Master](https://www.github.com/tauri-apps/tauri/../../Legend-Master)) Fix uninstall fails if you close the app manually during the 'Click Ok to kill it' dialog
|
||||
|
||||
## \[2.7.2]
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [`7f710b8f3`](https://www.github.com/tauri-apps/tauri/commit/7f710b8f3b509ed327d76761926511cf56e66b2d) ([#14390](https://www.github.com/tauri-apps/tauri/pull/14390) by [@FabianLars](https://www.github.com/tauri-apps/tauri/../../FabianLars)) Inline linuxdeploy plugins which were previously downloaded from `https://raw.githubusercontent.com` which lately blocks many users with a 429 error.
|
||||
- [`fc017ee25`](https://www.github.com/tauri-apps/tauri/commit/fc017ee2577f48615367ea519386d3f37837e2c1) ([#14368](https://www.github.com/tauri-apps/tauri/pull/14368) by [@kandrelczyk](https://www.github.com/tauri-apps/tauri/../../kandrelczyk)) Mention symbol stripping on Linux in binary patch failed warning message
|
||||
|
||||
## \[2.7.1]
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Upgraded to `tauri-macos-sign@2.3.0`
|
||||
|
||||
## \[2.7.0]
|
||||
|
||||
### New Features
|
||||
|
||||
- [`2a06d1006`](https://www.github.com/tauri-apps/tauri/commit/2a06d10066a806e392efe8bfb16d943ee0b0b61d) ([#14052](https://www.github.com/tauri-apps/tauri/pull/14052)) Add a `--no-sign` flag to the `tauri build` and `tauri bundle` commands to skip the code signing step, improving the developer experience for local testing and development without requiring code signing keys.
|
||||
- [`cc8c0b531`](https://www.github.com/tauri-apps/tauri/commit/cc8c0b53171173dbd1d01781a50de1a3ea159031) ([#14031](https://www.github.com/tauri-apps/tauri/pull/14031)) Support providing `plist::Value` as macOS entitlements.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [`b06b3bd09`](https://www.github.com/tauri-apps/tauri/commit/b06b3bd091b0fed26cdcfb23cacb0462a7a9cc2d) ([#14126](https://www.github.com/tauri-apps/tauri/pull/14126)) Improve error messages with more context.
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- [`06d4a4ed6`](https://www.github.com/tauri-apps/tauri/commit/06d4a4ed6c146d6c7782016cf90037b56b944445) ([#14241](https://www.github.com/tauri-apps/tauri/pull/14241)) Set `APPIMAGE_EXTRACT_AND_RUN` on top of using the `--appimage-extra-and-run` cli arg for linuxdeploy.
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Upgraded to `tauri-utils@2.8.0`
|
||||
|
||||
### Breaking Changes
|
||||
|
||||
- [`ed7c9a410`](https://www.github.com/tauri-apps/tauri/commit/ed7c9a4100e08c002212265549d12130d021ad1e) ([#14108](https://www.github.com/tauri-apps/tauri/pull/14108)) Changed `MacOsSettings::info_plist_path` to `MacOsSettings::info_plist`.
|
||||
|
||||
## \[2.6.1]
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "tauri-bundler"
|
||||
version = "2.6.1"
|
||||
version = "2.7.5"
|
||||
authors = [
|
||||
"George Burton <burtonageo@gmail.com>",
|
||||
"Tauri Programme within The Commons Conservancy",
|
||||
@ -15,13 +15,13 @@ rust-version = "1.77.2"
|
||||
exclude = ["CHANGELOG.md", "/target", "rustfmt.toml"]
|
||||
|
||||
[dependencies]
|
||||
tauri-utils = { version = "2.7.0", path = "../tauri-utils", features = [
|
||||
tauri-utils = { version = "2.8.1", path = "../tauri-utils", features = [
|
||||
"resources",
|
||||
] }
|
||||
image = "0.25"
|
||||
flate2 = "1"
|
||||
anyhow = "1"
|
||||
thiserror = "2"
|
||||
anyhow = "1"
|
||||
serde_json = "1"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
strsim = "0.11"
|
||||
@ -44,6 +44,8 @@ url = "2"
|
||||
uuid = { version = "1", features = ["v4", "v5"] }
|
||||
regex = "1"
|
||||
goblin = "0.9"
|
||||
plist = "1"
|
||||
|
||||
|
||||
[target."cfg(target_os = \"windows\")".dependencies]
|
||||
bitness = "0.4"
|
||||
@ -57,8 +59,7 @@ features = ["Win32_System_SystemInformation", "Win32_System_Diagnostics_Debug"]
|
||||
[target."cfg(target_os = \"macos\")".dependencies]
|
||||
icns = { package = "tauri-icns", version = "0.1" }
|
||||
time = { version = "0.3", features = ["formatting"] }
|
||||
plist = "1"
|
||||
tauri-macos-sign = { version = "2.2.0", path = "../tauri-macos-sign" }
|
||||
tauri-macos-sign = { version = "2.3.2", path = "../tauri-macos-sign" }
|
||||
|
||||
[target."cfg(target_os = \"linux\")".dependencies]
|
||||
heck = "0.5"
|
||||
|
||||
@ -45,12 +45,10 @@ pub use self::{
|
||||
category::AppCategory,
|
||||
settings::{
|
||||
AppImageSettings, BundleBinary, BundleSettings, CustomSignCommandSettings, DebianSettings,
|
||||
DmgSettings, IosSettings, MacOsSettings, PackageSettings, PackageType, Position, RpmSettings,
|
||||
Settings, SettingsBuilder, Size, UpdaterSettings,
|
||||
DmgSettings, Entitlements, IosSettings, MacOsSettings, PackageSettings, PackageType, PlistKind,
|
||||
Position, RpmSettings, Settings, SettingsBuilder, Size, UpdaterSettings,
|
||||
},
|
||||
};
|
||||
#[cfg(target_os = "macos")]
|
||||
use anyhow::Context;
|
||||
pub use settings::{NsisSettings, WindowsSettings, WixLanguage, WixLanguageConfig, WixSettings};
|
||||
|
||||
use std::{
|
||||
@ -223,31 +221,30 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<Bundle>> {
.map(|b| b.bundle_paths)
{
for app_bundle_path in &app_bundle_paths {
use crate::error::ErrorExt;

log::info!(action = "Cleaning"; "{}", app_bundle_path.display());
match app_bundle_path.is_dir() {
true => std::fs::remove_dir_all(app_bundle_path),
false => std::fs::remove_file(app_bundle_path),
}
.with_context(|| {
format!(
"Failed to clean the app bundle at {}",
app_bundle_path.display()
)
})?
.fs_context(
"failed to clean the app bundle",
app_bundle_path.to_path_buf(),
)?;
}
}
}
}

if bundles.is_empty() {
return Err(anyhow::anyhow!("No bundles were built").into());
return Ok(bundles);
}

let bundles_wo_updater = bundles
let finished_bundles = bundles
.iter()
.filter(|b| b.package_type != PackageType::Updater)
.collect::<Vec<_>>();
let finished_bundles = bundles_wo_updater.len();
.count();
let pluralised = if finished_bundles == 1 {
"bundle"
} else {
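The cleanup above replaces an ad-hoc `with_context` closure with the crate's `ErrorExt::fs_context`, which attaches both a message and the offending path to the error. A hypothetical sketch of what such an extension trait can look like; the real trait lives in `crate::error` and its exact signature may differ:

```rust
// Hypothetical stand-in for the bundler's `ErrorExt::fs_context`; illustrative only.
use std::path::PathBuf;

trait FsContext<T> {
  fn fs_context(self, message: &str, path: PathBuf) -> anyhow::Result<T>;
}

impl<T> FsContext<T> for std::io::Result<T> {
  fn fs_context(self, message: &str, path: PathBuf) -> anyhow::Result<T> {
    // Pair the human-readable message with the path that the operation touched.
    self.map_err(|error| anyhow::anyhow!("{message} ({}): {error}", path.display()))
  }
}
```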
@ -276,7 +273,7 @@ fn sign_binaries_if_needed(settings: &Settings, target_os: &TargetPlatform) -> c
|
||||
if matches!(target_os, TargetPlatform::Windows) {
|
||||
if settings.windows().can_sign() {
|
||||
if settings.no_sign() {
|
||||
log::info!("Skipping binary signing due to --no-sign flag.");
|
||||
log::warn!("Skipping binary signing due to --no-sign flag.");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
|
||||
@ -0,0 +1,165 @@
|
||||
#! /bin/bash
|
||||
|
||||
# abort on all errors
|
||||
set -e
|
||||
|
||||
if [ "$DEBUG" != "" ]; then
|
||||
set -x
|
||||
fi
|
||||
|
||||
script=$(readlink -f "$0")
|
||||
|
||||
show_usage() {
|
||||
echo "Usage: $script --appdir <path to AppDir>"
|
||||
echo
|
||||
echo "Bundles GStreamer plugins into an AppDir"
|
||||
echo
|
||||
echo "Required variables:"
|
||||
echo " LINUXDEPLOY=\".../linuxdeploy\" path to linuxdeploy (e.g., AppImage); set automatically when plugin is run directly by linuxdeploy"
|
||||
echo
|
||||
echo "Optional variables:"
|
||||
echo " GSTREAMER_INCLUDE_BAD_PLUGINS=\"1\" (default: disabled; set to empty string or unset to disable)"
|
||||
echo " GSTREAMER_PLUGINS_DIR=\"...\" (directory containing GStreamer plugins; default: guessed based on main distro architecture)"
|
||||
echo " GSTREAMER_HELPERS_DIR=\"...\" (directory containing GStreamer helper tools like gst-plugin-scanner; default: guessed based on main distro architecture)"
|
||||
echo " GSTREAMER_VERSION=\"1.0\" (default: 1.0)"
|
||||
}
|
||||
|
||||
while [ "$1" != "" ]; do
|
||||
case "$1" in
|
||||
--plugin-api-version)
|
||||
echo "0"
|
||||
exit 0
|
||||
;;
|
||||
--appdir)
|
||||
APPDIR="$2"
|
||||
shift
|
||||
shift
|
||||
;;
|
||||
--help)
|
||||
show_usage
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
echo "Invalid argument: $1"
|
||||
echo
|
||||
show_usage
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ "$APPDIR" == "" ]; then
|
||||
show_usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! which patchelf &>/dev/null && ! type patchelf &>/dev/null; then
|
||||
echo "Error: patchelf not found"
|
||||
echo
|
||||
show_usage
|
||||
exit 2
|
||||
fi
|
||||
|
||||
if [[ "$LINUXDEPLOY" == "" ]]; then
|
||||
echo "Error: \$LINUXDEPLOY not set"
|
||||
echo
|
||||
show_usage
|
||||
exit 3
|
||||
fi
|
||||
|
||||
mkdir -p "$APPDIR"
|
||||
|
||||
export GSTREAMER_VERSION="${GSTREAMER_VERSION:-1.0}"
|
||||
|
||||
plugins_target_dir="$APPDIR"/usr/lib/gstreamer-"$GSTREAMER_VERSION"
|
||||
helpers_target_dir="$APPDIR"/usr/lib/gstreamer"$GSTREAMER_VERSION"/gstreamer-"$GSTREAMER_VERSION"
|
||||
|
||||
if [ "$GSTREAMER_PLUGINS_DIR" != "" ]; then
|
||||
plugins_dir="${GSTREAMER_PLUGINS_DIR}"
|
||||
elif [ -d /usr/lib/"$(uname -m)"-linux-gnu/gstreamer-"$GSTREAMER_VERSION" ]; then
|
||||
plugins_dir=/usr/lib/$(uname -m)-linux-gnu/gstreamer-"$GSTREAMER_VERSION"
|
||||
else
|
||||
plugins_dir=/usr/lib/gstreamer-"$GSTREAMER_VERSION"
|
||||
fi
|
||||
|
||||
if [ "$GSTREAMER_HELPERS_DIR" != "" ]; then
|
||||
helpers_dir="${GSTREAMER_HELPERS_DIR}"
|
||||
else
|
||||
helpers_dir=/usr/lib/$(uname -m)-linux-gnu/gstreamer"$GSTREAMER_VERSION"/gstreamer-"$GSTREAMER_VERSION"
|
||||
fi
|
||||
|
||||
if [ ! -d "$plugins_dir" ]; then
|
||||
echo "Error: could not find plugins directory: $plugins_dir"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
mkdir -p "$plugins_target_dir"
|
||||
|
||||
echo "Copying plugins into $plugins_target_dir"
|
||||
for i in "$plugins_dir"/*; do
|
||||
[ -d "$i" ] && continue
|
||||
[ ! -f "$i" ] && echo "File does not exist: $i" && continue
|
||||
|
||||
echo "Copying plugin: $i"
|
||||
cp "$i" "$plugins_target_dir"
|
||||
done
|
||||
|
||||
"$LINUXDEPLOY" --appdir "$APPDIR"
|
||||
|
||||
for i in "$plugins_target_dir"/*; do
|
||||
[ -d "$i" ] && continue
|
||||
[ ! -f "$i" ] && echo "File does not exist: $i" && continue
|
||||
(file "$i" | grep -v ELF --silent) && echo "Ignoring non ELF file: $i" && continue
|
||||
|
||||
echo "Manually setting rpath for $i"
|
||||
patchelf --set-rpath '$ORIGIN/..:$ORIGIN' "$i"
|
||||
done
|
||||
|
||||
mkdir -p "$helpers_target_dir"
|
||||
|
||||
echo "Copying helpers in $helpers_target_dir"
|
||||
for i in "$helpers_dir"/*; do
|
||||
[ -d "$i" ] && continue
|
||||
[ ! -f "$i" ] && echo "File does not exist: $i" && continue
|
||||
|
||||
echo "Copying helper: $i"
|
||||
cp "$i" "$helpers_target_dir"
|
||||
done
|
||||
|
||||
for i in "$helpers_target_dir"/*; do
|
||||
[ -d "$i" ] && continue
|
||||
[ ! -f "$i" ] && echo "File does not exist: $i" && continue
|
||||
(file "$i" | grep -v ELF --silent) && echo "Ignoring non ELF file: $i" && continue
|
||||
|
||||
echo "Manually setting rpath for $i"
|
||||
patchelf --set-rpath '$ORIGIN/../..' "$i"
|
||||
done
|
||||
|
||||
echo "Installing AppRun hook"
|
||||
mkdir -p "$APPDIR"/apprun-hooks
|
||||
|
||||
if [ "$GSTREAMER_VERSION" == "1.0" ]; then
|
||||
cat > "$APPDIR"/apprun-hooks/linuxdeploy-plugin-gstreamer.sh <<\EOF
|
||||
#! /bin/bash
|
||||
|
||||
export GST_REGISTRY_REUSE_PLUGIN_SCANNER="no"
|
||||
export GST_PLUGIN_SYSTEM_PATH_1_0="${APPDIR}/usr/lib/gstreamer-1.0"
|
||||
export GST_PLUGIN_PATH_1_0="${APPDIR}/usr/lib/gstreamer-1.0"
|
||||
|
||||
export GST_PLUGIN_SCANNER_1_0="${APPDIR}/usr/lib/gstreamer1.0/gstreamer-1.0/gst-plugin-scanner"
|
||||
export GST_PTP_HELPER_1_0="${APPDIR}/usr/lib/gstreamer1.0/gstreamer-1.0/gst-ptp-helper"
|
||||
EOF
|
||||
elif [ "$GSTREAMER_VERSION" == "0.10" ]; then
|
||||
cat > "$APPDIR"/apprun-hooks/linuxdeploy-plugin-gstreamer.sh <<\EOF
|
||||
#! /bin/bash
|
||||
|
||||
export GST_REGISTRY_REUSE_PLUGIN_SCANNER="no"
|
||||
export GST_PLUGIN_SYSTEM_PATH_0_10="${APPDIR}/usr/lib/gstreamer-1.0"
|
||||
|
||||
export GST_PLUGIN_SCANNER_0_10="${APPDIR}/usr/lib/gstreamer1.0/gstreamer-1.0/gst-plugin-scanner"
|
||||
export GST_PTP_HELPER_0_10="${APPDIR}/usr/lib/gstreamer1.0/gstreamer-1.0/gst-ptp-helper"
|
||||
EOF
|
||||
else
|
||||
echo "Warning: unknown GStreamer version: $GSTREAMER_VERSION, cannot install AppRun hook"
|
||||
fi
|
||||
|
||||
@ -0,0 +1,327 @@
|
||||
#! /usr/bin/env bash
|
||||
|
||||
# GTK3 environment variables: https://developer.gnome.org/gtk3/stable/gtk-running.html
|
||||
# GTK4 environment variables: https://developer.gnome.org/gtk4/stable/gtk-running.html
|
||||
|
||||
# abort on all errors
|
||||
set -e
|
||||
|
||||
if [ "$DEBUG" != "" ]; then
|
||||
set -x
|
||||
verbose="--verbose"
|
||||
fi
|
||||
|
||||
script=$(readlink -f "$0")
|
||||
|
||||
show_usage() {
|
||||
echo "Usage: $script --appdir <path to AppDir>"
|
||||
echo
|
||||
echo "Bundles resources for applications that use GTK into an AppDir"
|
||||
echo
|
||||
echo "Required variables:"
|
||||
echo " LINUXDEPLOY=\".../linuxdeploy\" path to linuxdeploy (e.g., AppImage); set automatically when plugin is run directly by linuxdeploy"
|
||||
#echo
|
||||
#echo "Optional variables:"
|
||||
#echo " DEPLOY_GTK_VERSION (major version of GTK to deploy, e.g. '2', '3' or '4'; auto-detect by default)"
|
||||
}
|
||||
|
||||
variable_is_true() {
|
||||
local var="$1"
|
||||
|
||||
if [ -n "$var" ] && { [ "$var" == "true" ] || [ "$var" -gt 0 ]; } 2> /dev/null; then
|
||||
return 0 # true
|
||||
else
|
||||
return 1 # false
|
||||
fi
|
||||
}
|
||||
|
||||
get_pkgconf_variable() {
|
||||
local variable="$1"
|
||||
local library="$2"
|
||||
local default_path="$3"
|
||||
|
||||
path="$("$PKG_CONFIG" --variable="$variable" "$library")"
|
||||
if [ -n "$path" ]; then
|
||||
echo "$path"
|
||||
elif [ -n "$default_path" ]; then
|
||||
echo "$default_path"
|
||||
else
|
||||
echo "$0: there is no '$variable' variable for '$library' library." > /dev/stderr
|
||||
echo "Please check the '$library.pc' file is present in \$PKG_CONFIG_PATH (you may need to install the appropriate -dev/-devel package)." > /dev/stderr
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
copy_tree() {
|
||||
local src=("${@:1:$#-1}")
|
||||
local dst="${*:$#}"
|
||||
|
||||
for elem in "${src[@]}"; do
|
||||
mkdir -p "${dst::-1}$elem"
|
||||
cp "$elem" --archive --parents --target-directory="$dst" $verbose
|
||||
done
|
||||
}
|
||||
|
||||
search_tool() {
|
||||
local tool="$1"
|
||||
local directory="$2"
|
||||
|
||||
if command -v "$tool"; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
PATH_ARRAY=(
|
||||
"/usr/lib/$(uname -m)-linux-gnu/$directory/$tool"
|
||||
"/usr/lib/$directory/$tool"
|
||||
"/usr/bin/$tool"
|
||||
"/usr/bin/$tool-64"
|
||||
"/usr/bin/$tool-32"
|
||||
)
|
||||
|
||||
for path in "${PATH_ARRAY[@]}"; do
|
||||
if [ -x "$path" ]; then
|
||||
echo "$path"
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
#DEPLOY_GTK_VERSION="${DEPLOY_GTK_VERSION:-0}" # When not set by user, this variable use the integer '0' as a sentinel value
|
||||
DEPLOY_GTK_VERSION=3 # Force GTK3 for tauri apps
|
||||
APPDIR=
|
||||
|
||||
while [ "$1" != "" ]; do
|
||||
case "$1" in
|
||||
--plugin-api-version)
|
||||
echo "0"
|
||||
exit 0
|
||||
;;
|
||||
--appdir)
|
||||
APPDIR="$2"
|
||||
shift
|
||||
shift
|
||||
;;
|
||||
--help)
|
||||
show_usage
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
echo "Invalid argument: $1"
|
||||
echo
|
||||
show_usage
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ "$APPDIR" == "" ]; then
|
||||
show_usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
mkdir -p "$APPDIR"
|
||||
# make lib64 writable again.
|
||||
chmod +w "$APPDIR"/usr/lib64 || true
|
||||
|
||||
if command -v pkgconf > /dev/null; then
|
||||
PKG_CONFIG="pkgconf"
|
||||
elif command -v pkg-config > /dev/null; then
|
||||
PKG_CONFIG="pkg-config"
|
||||
else
|
||||
echo "$0: pkg-config/pkgconf not found in PATH, aborting"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! command -v find &>/dev/null && ! type find &>/dev/null; then
|
||||
echo -e "$0: find not found.\nInstall findutils then re-run the plugin."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$LINUXDEPLOY" ]; then
|
||||
echo -e "$0: LINUXDEPLOY environment variable is not set.\nDownload a suitable linuxdeploy AppImage, set the environment variable and re-run the plugin."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
gtk_versions=0 # Count major versions of GTK when auto-detect GTK version
|
||||
if [ "$DEPLOY_GTK_VERSION" -eq 0 ]; then
|
||||
echo "Determining which GTK version to deploy"
|
||||
while IFS= read -r -d '' file; do
|
||||
if [ "$DEPLOY_GTK_VERSION" -ne 2 ] && ldd "$file" | grep -q "libgtk-x11-2.0.so"; then
|
||||
DEPLOY_GTK_VERSION=2
|
||||
gtk_versions="$((gtk_versions+1))"
|
||||
fi
|
||||
if [ "$DEPLOY_GTK_VERSION" -ne 3 ] && ldd "$file" | grep -q "libgtk-3.so"; then
|
||||
DEPLOY_GTK_VERSION=3
|
||||
gtk_versions="$((gtk_versions+1))"
|
||||
fi
|
||||
if [ "$DEPLOY_GTK_VERSION" -ne 4 ] && ldd "$file" | grep -q "libgtk-4.so"; then
|
||||
DEPLOY_GTK_VERSION=4
|
||||
gtk_versions="$((gtk_versions+1))"
|
||||
fi
|
||||
done < <(find "$APPDIR/usr/bin" -executable -type f -print0)
|
||||
fi
|
||||
|
||||
if [ "$gtk_versions" -gt 1 ]; then
|
||||
echo "$0: can not deploy multiple GTK versions at the same time."
|
||||
echo "Please set DEPLOY_GTK_VERSION to {2, 3, 4}."
|
||||
exit 1
|
||||
elif [ "$DEPLOY_GTK_VERSION" -eq 0 ]; then
|
||||
echo "$0: failed to auto-detect GTK version."
|
||||
echo "Please set DEPLOY_GTK_VERSION to {2, 3, 4}."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Installing AppRun hook"
|
||||
HOOKSDIR="$APPDIR/apprun-hooks"
|
||||
HOOKFILE="$HOOKSDIR/linuxdeploy-plugin-gtk.sh"
|
||||
mkdir -p "$HOOKSDIR"
|
||||
cat > "$HOOKFILE" <<\EOF
|
||||
#! /usr/bin/env bash
|
||||
|
||||
gsettings get org.gnome.desktop.interface gtk-theme 2> /dev/null | grep -qi "dark" && GTK_THEME_VARIANT="dark" || GTK_THEME_VARIANT="light"
|
||||
APPIMAGE_GTK_THEME="${APPIMAGE_GTK_THEME:-"Adwaita:$GTK_THEME_VARIANT"}" # Allow user to override theme (discouraged)
|
||||
|
||||
export APPDIR="${APPDIR:-"$(dirname "$(realpath "$0")")"}" # Workaround to run extracted AppImage
|
||||
export GTK_DATA_PREFIX="$APPDIR"
|
||||
export GTK_THEME="$APPIMAGE_GTK_THEME" # Custom themes are broken
|
||||
export GDK_BACKEND=x11 # Crash with Wayland backend on Wayland - We tested it without it and ended up with this: https://github.com/tauri-apps/tauri/issues/8541
|
||||
export XDG_DATA_DIRS="$APPDIR/usr/share:/usr/share:$XDG_DATA_DIRS" # g_get_system_data_dirs() from GLib
|
||||
EOF
|
||||
|
||||
echo "Installing GLib schemas"
|
||||
# Note: schemasdir is undefined on Ubuntu 16.04
|
||||
glib_schemasdir="$(get_pkgconf_variable "schemasdir" "gio-2.0" "/usr/share/glib-2.0/schemas")"
|
||||
copy_tree "$glib_schemasdir" "$APPDIR/"
|
||||
glib-compile-schemas "$APPDIR/$glib_schemasdir"
|
||||
cat >> "$HOOKFILE" <<EOF
|
||||
export GSETTINGS_SCHEMA_DIR="\$APPDIR/$glib_schemasdir"
|
||||
EOF
|
||||
|
||||
case "$DEPLOY_GTK_VERSION" in
|
||||
2)
|
||||
# https://github.com/linuxdeploy/linuxdeploy-plugin-gtk/pull/20#issuecomment-826354261
|
||||
echo "WARNING: Gtk+2 applications are not fully supported by this plugin"
|
||||
;;
|
||||
3)
|
||||
echo "Installing GTK 3.0 modules"
|
||||
gtk3_exec_prefix="$(get_pkgconf_variable "exec_prefix" "gtk+-3.0")"
|
||||
gtk3_libdir="$(get_pkgconf_variable "libdir" "gtk+-3.0")/gtk-3.0"
#gtk3_path="$gtk3_libdir/modules" export GTK_PATH="\$APPDIR/$gtk3_path"
gtk3_immodulesdir="$gtk3_libdir/$(get_pkgconf_variable "gtk_binary_version" "gtk+-3.0")/immodules"
gtk3_printbackendsdir="$gtk3_libdir/$(get_pkgconf_variable "gtk_binary_version" "gtk+-3.0")/printbackends"
gtk3_immodules_cache_file="$(dirname "$gtk3_immodulesdir")/immodules.cache"
gtk3_immodules_query="$(search_tool "gtk-query-immodules-3.0" "libgtk-3-0")"
copy_tree "$gtk3_libdir" "$APPDIR/"
cat >> "$HOOKFILE" <<EOF
export GTK_EXE_PREFIX="\$APPDIR/$gtk3_exec_prefix"
export GTK_PATH="\$APPDIR/$gtk3_libdir:/usr/lib64/gtk-3.0:/usr/lib/x86_64-linux-gnu/gtk-3.0"
export GTK_IM_MODULE_FILE="\$APPDIR/$gtk3_immodules_cache_file"

EOF
if [ -x "$gtk3_immodules_query" ]; then
echo "Updating immodules cache in $APPDIR/$gtk3_immodules_cache_file"
"$gtk3_immodules_query" > "$APPDIR/$gtk3_immodules_cache_file"
else
echo "WARNING: gtk-query-immodules-3.0 not found"
fi
if [ ! -f "$APPDIR/$gtk3_immodules_cache_file" ]; then
echo "WARNING: immodules.cache file is missing"
fi
sed -i "s|$gtk3_libdir/3.0.0/immodules/||g" "$APPDIR/$gtk3_immodules_cache_file"
;;
4)
echo "Installing GTK 4.0 modules"
gtk4_exec_prefix="$(get_pkgconf_variable "exec_prefix" "gtk4" "/usr")"
gtk4_libdir="$(get_pkgconf_variable "libdir" "gtk4")/gtk-4.0"
gtk4_path="$gtk4_libdir/modules"
copy_tree "$gtk4_libdir" "$APPDIR/"
cat >> "$HOOKFILE" <<EOF
export GTK_EXE_PREFIX="\$APPDIR/$gtk4_exec_prefix"
export GTK_PATH="\$APPDIR/$gtk4_path"
EOF
;;
*)
echo "$0: '$DEPLOY_GTK_VERSION' is not a valid GTK major version."
echo "Please set DEPLOY_GTK_VERSION to {2, 3, 4}."
exit 1
esac

echo "Installing GDK PixBufs"
gdk_libdir="$(get_pkgconf_variable "libdir" "gdk-pixbuf-2.0")"
gdk_pixbuf_binarydir="$(get_pkgconf_variable "gdk_pixbuf_binarydir" "gdk-pixbuf-2.0")"
gdk_pixbuf_cache_file="$(get_pkgconf_variable "gdk_pixbuf_cache_file" "gdk-pixbuf-2.0")"
gdk_pixbuf_moduledir="$(get_pkgconf_variable "gdk_pixbuf_moduledir" "gdk-pixbuf-2.0")"
# Note: gdk_pixbuf_query_loaders variable is not defined on some systems
gdk_pixbuf_query="$(search_tool "gdk-pixbuf-query-loaders" "gdk-pixbuf-2.0")"
copy_tree "$gdk_pixbuf_binarydir" "$APPDIR/"
cat >> "$HOOKFILE" <<EOF
export GDK_PIXBUF_MODULE_FILE="\$APPDIR/$gdk_pixbuf_cache_file"
EOF
if [ -x "$gdk_pixbuf_query" ]; then
echo "Updating pixbuf cache in $APPDIR/$gdk_pixbuf_cache_file"
"$gdk_pixbuf_query" > "$APPDIR/$gdk_pixbuf_cache_file"
else
echo "WARNING: gdk-pixbuf-query-loaders not found"
fi
if [ ! -f "$APPDIR/$gdk_pixbuf_cache_file" ]; then
echo "WARNING: loaders.cache file is missing"
fi
sed -i "s|$gdk_pixbuf_moduledir/||g" "$APPDIR/$gdk_pixbuf_cache_file"

echo "Copying more libraries"
gobject_libdir="$(get_pkgconf_variable "libdir" "gobject-2.0")"
gio_libdir="$(get_pkgconf_variable "libdir" "gio-2.0")"
librsvg_libdir="$(get_pkgconf_variable "libdir" "librsvg-2.0")"
pango_libdir="$(get_pkgconf_variable "libdir" "pango")"
pangocairo_libdir="$(get_pkgconf_variable "libdir" "pangocairo")"
pangoft2_libdir="$(get_pkgconf_variable "libdir" "pangoft2")"
FIND_ARRAY=(
"$gdk_libdir" "libgdk_pixbuf-*.so*"
"$gobject_libdir" "libgobject-*.so*"
"$gio_libdir" "libgio-*.so*"
"$librsvg_libdir" "librsvg-*.so*"
"$pango_libdir" "libpango-*.so*"
"$pangocairo_libdir" "libpangocairo-*.so*"
"$pangoft2_libdir" "libpangoft2-*.so*"
)
LIBRARIES=()
for (( i=0; i<${#FIND_ARRAY[@]}; i+=2 )); do
directory=${FIND_ARRAY[i]}
library=${FIND_ARRAY[i+1]}
while IFS= read -r -d '' file; do
LIBRARIES+=( "--library=$file" )
done < <(find "$directory" \( -type l -o -type f \) -name "$library" -print0)
done

env LINUXDEPLOY_PLUGIN_MODE=1 "$LINUXDEPLOY" --appdir="$APPDIR" "${LIBRARIES[@]}"

# Create symbolic links as a workaround
# Details: https://github.com/linuxdeploy/linuxdeploy-plugin-gtk/issues/24#issuecomment-1030026529
echo "Manually setting rpath for GTK modules"
PATCH_ARRAY=(
"$gtk3_immodulesdir"
"$gtk3_printbackendsdir"
"$gdk_pixbuf_moduledir"
)
for directory in "${PATCH_ARRAY[@]}"; do
while IFS= read -r -d '' file; do
ln $verbose -s "${file/\/usr\/lib\//}" "$APPDIR/usr/lib"
done < <(find "$directory" -name '*.so' -print0)
done

# set write permission on lib64 again to make it deletable.
chmod +w "$APPDIR"/usr/lib64 || true

# We have to copy the files first to not get permission errors when we assign gio_extras_dir
find /usr/lib* -name libgiognutls.so -exec mkdir -p "$APPDIR"/"$(dirname '{}')" \; -exec cp --parents '{}' "$APPDIR/" \; || true
# related files that we seemingly don't need:
# libgiolibproxy.so - libgiognomeproxy.so - glib-pacrunner

gio_extras_dir=$(find "$APPDIR"/usr/lib* -name libgiognutls.so -exec dirname '{}' \; 2>/dev/null)
cat >> "$HOOKFILE" <<EOF
export GIO_EXTRA_MODULES="\$APPDIR/${gio_extras_dir#"$APPDIR"/}"
EOF

#binary patch absolute paths in libwebkit files
find "$APPDIR"/usr/lib* -name 'libwebkit*' -exec sed -i -e "s|/usr|././|g" '{}' \;

@ -6,10 +6,10 @@
use super::debian;
use crate::{
bundle::settings::Arch,
error::{Context, ErrorExt},
utils::{fs_utils, http_utils::download, CommandExt},
Settings,
};
use anyhow::Context;
use std::{
fs,
path::{Path, PathBuf},
@ -124,13 +124,13 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
// xdg-open will be handled by the `files` config instead
if settings.deep_link_protocols().is_some() && !app_dir_usr_bin.join("xdg-open").exists() {
fs::copy("/usr/bin/xdg-mime", app_dir_usr_bin.join("xdg-mime"))
.context("xdg-mime binary not found")?;
.fs_context("xdg-mime binary not found", "/usr/bin/xdg-mime".to_string())?;
}

// we also check if the user may have provided their own copy already
if settings.appimage().bundle_xdg_open && !app_dir_usr_bin.join("xdg-open").exists() {
fs::copy("/usr/bin/xdg-open", app_dir_usr_bin.join("xdg-open"))
.context("xdg-open binary not found")?;
.fs_context("xdg-open binary not found", "/usr/bin/xdg-open".to_string())?;
}

let search_dirs = [
@ -190,6 +190,8 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
let mut cmd = Command::new(linuxdeploy_path);
cmd.env("OUTPUT", &appimage_path);
cmd.env("ARCH", tools_arch);
// Looks like the cli arg isn't enough for the updated AppImage output-plugin.
cmd.env("APPIMAGE_EXTRACT_AND_RUN", "1");
cmd.args([
"--appimage-extract-and-run",
"--verbosity",
@ -227,25 +229,25 @@ fn prepare_tools(tools_path: &Path, arch: &str, verbose: bool) -> crate::Result<
let data = download(&format!(
"https://github.com/tauri-apps/binary-releases/releases/download/apprun-old/AppRun-{arch}"
))?;
write_and_make_executable(&apprun, data)?;
write_and_make_executable(&apprun, &data)?;
}

let linuxdeploy_arch = if arch == "i686" { "i383" } else { arch };
let linuxdeploy_arch = if arch == "i686" { "i386" } else { arch };
let linuxdeploy = tools_path.join(format!("linuxdeploy-{linuxdeploy_arch}.AppImage"));
if !linuxdeploy.exists() {
let data = download(&format!("https://github.com/tauri-apps/binary-releases/releases/download/linuxdeploy/linuxdeploy-{linuxdeploy_arch}.AppImage"))?;
write_and_make_executable(&linuxdeploy, data)?;
write_and_make_executable(&linuxdeploy, &data)?;
}

let gtk = tools_path.join("linuxdeploy-plugin-gtk.sh");
if !gtk.exists() {
let data = download("https://raw.githubusercontent.com/tauri-apps/linuxdeploy-plugin-gtk/master/linuxdeploy-plugin-gtk.sh")?;
let data = include_bytes!("./linuxdeploy-plugin-gtk.sh");
write_and_make_executable(&gtk, data)?;
}

let gstreamer = tools_path.join("linuxdeploy-plugin-gstreamer.sh");
if !gstreamer.exists() {
let data = download("https://raw.githubusercontent.com/tauri-apps/linuxdeploy-plugin-gstreamer/master/linuxdeploy-plugin-gstreamer.sh")?;
let data = include_bytes!("./linuxdeploy-plugin-gstreamer.sh");
write_and_make_executable(&gstreamer, data)?;
}

@ -254,7 +256,7 @@ fn prepare_tools(tools_path: &Path, arch: &str, verbose: bool) -> crate::Result<
// This is optional, linuxdeploy will fall back to its built-in version if the download failed.
let data = download(&format!("https://github.com/linuxdeploy/linuxdeploy-plugin-appimage/releases/download/continuous/linuxdeploy-plugin-appimage-{arch}.AppImage"));
match data {
Ok(data) => write_and_make_executable(&appimage, data)?,
Ok(data) => write_and_make_executable(&appimage, &data)?,
Err(err) => {
log::error!("Download of AppImage plugin failed. Using older built-in version instead.");
if verbose {
@ -279,7 +281,7 @@ fn prepare_tools(tools_path: &Path, arch: &str, verbose: bool) -> crate::Result<
Ok(linuxdeploy)
}

fn write_and_make_executable(path: &Path, data: Vec<u8>) -> std::io::Result<()> {
fn write_and_make_executable(path: &Path, data: &[u8]) -> std::io::Result<()> {
use std::os::unix::fs::PermissionsExt;

fs::write(path, data)?;
@ -24,8 +24,12 @@
// generate postinst or prerm files.

use super::freedesktop;
use crate::{bundle::settings::Arch, utils::fs_utils, Settings};
use anyhow::Context;
use crate::{
bundle::settings::Arch,
error::{Context, ErrorExt},
utils::fs_utils,
Settings,
};
use flate2::{write::GzEncoder, Compression};
use tar::HeaderMode;
use walkdir::WalkDir;
@ -64,30 +68,32 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
let base_dir = settings.project_out_directory().join("bundle/deb");
let package_dir = base_dir.join(&package_base_name);
if package_dir.exists() {
fs::remove_dir_all(&package_dir)
.with_context(|| format!("Failed to remove old {package_base_name}"))?;
fs::remove_dir_all(&package_dir).fs_context(
"Failed to Remove old package directory",
package_dir.clone(),
)?;
}
let package_path = base_dir.join(&package_name);

log::info!(action = "Bundling"; "{} ({})", package_name, package_path.display());

let (data_dir, _) = generate_data(settings, &package_dir)
.with_context(|| "Failed to build data folders and files")?;
let (data_dir, _) =
generate_data(settings, &package_dir).context("Failed to build data folders and files")?;
fs_utils::copy_custom_files(&settings.deb().files, &data_dir)
.with_context(|| "Failed to copy custom files")?;
.context("Failed to copy custom files")?;

// Generate control files.
let control_dir = package_dir.join("control");
generate_control_file(settings, arch, &control_dir, &data_dir)
.with_context(|| "Failed to create control file")?;
generate_scripts(settings, &control_dir).with_context(|| "Failed to create control scripts")?;
generate_md5sums(&control_dir, &data_dir).with_context(|| "Failed to create md5sums file")?;
.context("Failed to create control file")?;
generate_scripts(settings, &control_dir).context("Failed to create control scripts")?;
generate_md5sums(&control_dir, &data_dir).context("Failed to create md5sums file")?;

// Generate `debian-binary` file; see
// http://www.tldp.org/HOWTO/Debian-Binary-Package-Building-HOWTO/x60.html#AEN66
let debian_binary_path = package_dir.join("debian-binary");
create_file_with_data(&debian_binary_path, "2.0\n")
.with_context(|| "Failed to create debian-binary file")?;
.context("Failed to create debian-binary file")?;

// Apply tar/gzip/ar to create the final package file.
let control_tar_gz_path =
@ -113,8 +119,9 @@ pub fn generate_data(

for bin in settings.binaries() {
let bin_path = settings.binary_path(bin);
fs_utils::copy_file(&bin_path, &bin_dir.join(bin.name()))
.with_context(|| format!("Failed to copy binary from {bin_path:?}"))?;
let trgt = bin_dir.join(bin.name());
fs_utils::copy_file(&bin_path, &trgt)
.with_context(|| format!("Failed to copy binary from {bin_path:?} to {trgt:?}"))?;
}

copy_resource_files(settings, &data_dir).with_context(|| "Failed to copy resource files")?;

@ -21,12 +21,12 @@ use std::fs::{read_to_string, File};
use std::io::BufReader;
use std::path::{Path, PathBuf};

use anyhow::Context;
use handlebars::Handlebars;
use image::{self, codecs::png::PngDecoder, ImageDecoder};
use serde::Serialize;

use crate::{
error::Context,
utils::{self, fs_utils},
Settings,
};
@ -114,11 +114,13 @@ pub fn generate_desktop_file(
if let Some(template) = custom_template_path {
handlebars
.register_template_string("main.desktop", read_to_string(template)?)
.with_context(|| "Failed to setup custom handlebar template")?;
.map_err(Into::into)
.context("Failed to setup custom handlebar template")?;
} else {
handlebars
.register_template_string("main.desktop", include_str!("./main.desktop"))
.with_context(|| "Failed to setup default handlebar template")?;
.map_err(Into::into)
.context("Failed to setup default handlebar template")?;
}

#[derive(Serialize)]

@ -3,9 +3,8 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT

use crate::{bundle::settings::Arch, Settings};
use crate::{bundle::settings::Arch, error::ErrorExt, Settings};

use anyhow::Context;
use rpm::{self, signature::pgp, Dependency, FileMode, FileOptions};
use std::{
env,
@ -48,10 +47,13 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
let base_dir = settings.project_out_directory().join("bundle/rpm");
let package_dir = base_dir.join(&package_base_name);
if package_dir.exists() {
fs::remove_dir_all(&package_dir)
.with_context(|| format!("Failed to remove old {package_base_name}"))?;
fs::remove_dir_all(&package_dir).fs_context(
"Failed to remove old package directory",
package_dir.clone(),
)?;
}
fs::create_dir_all(&package_dir)?;
fs::create_dir_all(&package_dir)
.fs_context("Failed to create package directory", package_dir.clone())?;
let package_path = base_dir.join(&package_name);

log::info!(action = "Bundling"; "{} ({})", package_name, package_path.display());

@ -24,15 +24,16 @@

use super::{
icon::create_icns_file,
sign::{notarize, notarize_auth, notarize_without_stapling, sign, NotarizeAuthError, SignTarget},
sign::{notarize, notarize_auth, notarize_without_stapling, sign, SignTarget},
};
use crate::{
bundle::settings::PlistKind,
error::{Context, ErrorExt, NotarizeAuthError},
utils::{fs_utils, CommandExt},
Error::GenericError,
Settings,
};

use anyhow::Context;

use std::{
ffi::OsStr,
fs,
@ -65,11 +66,11 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {

if app_bundle_path.exists() {
fs::remove_dir_all(&app_bundle_path)
.with_context(|| format!("Failed to remove old {app_product_name}"))?;
.fs_context("failed to remove old app bundle", &app_bundle_path)?;
}
let bundle_directory = app_bundle_path.join("Contents");
fs::create_dir_all(&bundle_directory)
.with_context(|| format!("Failed to create bundle directory at {bundle_directory:?}"))?;
.fs_context("failed to create bundle directory", &bundle_directory)?;

let resources_dir = bundle_directory.join("Resources");
let bin_dir = bundle_directory.join("MacOS");
@ -133,7 +134,7 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
}
Err(e) => {
if matches!(e, NotarizeAuthError::MissingTeamId) {
return Err(anyhow::anyhow!("{e}").into());
return Err(e.into());
} else {
log::warn!("skipping app notarization, {}", e.to_string());
}
@ -173,6 +174,12 @@ fn copy_binaries_to_bundle(
/// Copies user-defined files to the app under Contents.
fn copy_custom_files_to_bundle(bundle_directory: &Path, settings: &Settings) -> crate::Result<()> {
for (contents_path, path) in settings.macos().files.iter() {
if !path.try_exists()? {
return Err(GenericError(format!(
"Failed to copy {path:?} to {contents_path:?}. {path:?} does not exist."
)));
}

let contents_path = if contents_path.is_absolute() {
contents_path.strip_prefix("/").unwrap()
} else {
@ -181,9 +188,13 @@ fn copy_custom_files_to_bundle(bundle_directory: &Path, settings: &Settings) ->
if path.is_file() {
fs_utils::copy_file(path, &bundle_directory.join(contents_path))
.with_context(|| format!("Failed to copy file {path:?} to {contents_path:?}"))?;
} else {
} else if path.is_dir() {
fs_utils::copy_dir(path, &bundle_directory.join(contents_path))
.with_context(|| format!("Failed to copy directory {path:?} to {contents_path:?}"))?;
} else {
return Err(GenericError(format!(
"{path:?} is not a file or directory."
)));
}
}
Ok(())
@ -253,6 +264,55 @@ fn create_info_plist(
}

if let Some(associations) = settings.file_associations() {
let exported_associations = associations
.iter()
.filter_map(|association| {
association.exported_type.as_ref().map(|exported_type| {
let mut dict = plist::Dictionary::new();

dict.insert(
"UTTypeIdentifier".into(),
exported_type.identifier.clone().into(),
);
if let Some(description) = &association.description {
dict.insert("UTTypeDescription".into(), description.clone().into());
}
if let Some(conforms_to) = &exported_type.conforms_to {
dict.insert(
"UTTypeConformsTo".into(),
plist::Value::Array(conforms_to.iter().map(|s| s.clone().into()).collect()),
);
}

let mut specification = plist::Dictionary::new();
specification.insert(
"public.filename-extension".into(),
plist::Value::Array(
association
.ext
.iter()
.map(|s| s.to_string().into())
.collect(),
),
);
if let Some(mime_type) = &association.mime_type {
specification.insert("public.mime-type".into(), mime_type.clone().into());
}

dict.insert("UTTypeTagSpecification".into(), specification.into());

plist::Value::Dictionary(dict)
})
})
.collect::<Vec<_>>();

if !exported_associations.is_empty() {
plist.insert(
"UTExportedTypeDeclarations".into(),
plist::Value::Array(exported_associations),
);
}

plist.insert(
"CFBundleDocumentTypes".into(),
plist::Value::Array(
@ -260,16 +320,27 @@ fn create_info_plist(
.iter()
.map(|association| {
let mut dict = plist::Dictionary::new();
dict.insert(
"CFBundleTypeExtensions".into(),
plist::Value::Array(
association
.ext
.iter()
.map(|ext| ext.to_string().into())
.collect(),
),
);

if !association.ext.is_empty() {
dict.insert(
"CFBundleTypeExtensions".into(),
plist::Value::Array(
association
.ext
.iter()
.map(|ext| ext.to_string().into())
.collect(),
),
);
}

if let Some(content_types) = &association.content_types {
dict.insert(
"LSItemContentTypes".into(),
plist::Value::Array(content_types.iter().map(|s| s.to_string().into()).collect()),
);
}

dict.insert(
"CFBundleTypeName".into(),
association
@ -297,6 +368,7 @@ fn create_info_plist(
plist::Value::Array(
protocols
.iter()
.filter(|p| !p.schemes.is_empty())
.map(|protocol| {
let mut dict = plist::Dictionary::new();
dict.insert(
@ -347,8 +419,11 @@ fn create_info_plist(
plist.insert("NSAppTransportSecurity".into(), security.into());
}

if let Some(user_plist_path) = &settings.macos().info_plist_path {
let user_plist = plist::Value::from_file(user_plist_path)?;
if let Some(user_plist) = &settings.macos().info_plist {
let user_plist = match user_plist {
PlistKind::Path(path) => plist::Value::from_file(path)?,
PlistKind::Plist(value) => value.clone(),
};
if let Some(dict) = user_plist.into_dictionary() {
for (key, value) in dict {
plist.insert(key, value);
@ -380,18 +455,12 @@ fn copy_frameworks_to_bundle(
) -> crate::Result<Vec<SignTarget>> {
let mut paths = Vec::new();

let frameworks = settings
.macos()
.frameworks
.as_ref()
.cloned()
.unwrap_or_default();
let frameworks = settings.macos().frameworks.clone().unwrap_or_default();
if frameworks.is_empty() {
return Ok(paths);
}
let dest_dir = bundle_directory.join("Frameworks");
fs::create_dir_all(bundle_directory)
.with_context(|| format!("Failed to create Frameworks directory at {dest_dir:?}"))?;
fs::create_dir_all(&dest_dir).fs_context("failed to create Frameworks directory", &dest_dir)?;
for framework in frameworks.iter() {
if framework.ends_with(".framework") {
let src_path = PathBuf::from(framework);
@ -405,9 +474,7 @@ fn copy_frameworks_to_bundle(
} else if framework.ends_with(".dylib") {
let src_path = PathBuf::from(framework);
if !src_path.exists() {
return Err(crate::Error::GenericError(format!(
"Library not found: {framework}"
)));
return Err(GenericError(format!("Library not found: {framework}")));
}
let src_name = src_path.file_name().expect("Couldn't get library filename");
let dest_path = dest_dir.join(src_name);
@ -418,7 +485,7 @@ fn copy_frameworks_to_bundle(
});
continue;
} else if framework.contains('/') {
return Err(crate::Error::GenericError(format!(
return Err(GenericError(format!(
"Framework path should have .framework extension: {framework}"
)));
}
@ -436,7 +503,7 @@ fn copy_frameworks_to_bundle(
{
continue;
}
return Err(crate::Error::GenericError(format!(
return Err(GenericError(format!(
"Could not locate framework: {framework}"
)));
}
@ -529,3 +596,153 @@ fn add_nested_code_sign_path(src_path: &Path, dest_path: &Path, sign_paths: &mut
}
}
}

#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::bundle::{BundleSettings, MacOsSettings, PackageSettings, SettingsBuilder};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
fs,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
/// Helper that builds a `Settings` instance and bundle directory for tests.
|
||||
/// It receives a mapping of bundle-relative paths to source paths and
|
||||
/// returns the generated bundle directory and settings.
|
||||
fn create_test_bundle(
|
||||
project_dir: &Path,
|
||||
files: HashMap<PathBuf, PathBuf>,
|
||||
) -> (PathBuf, crate::bundle::Settings) {
|
||||
let macos_settings = MacOsSettings {
|
||||
files,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let settings = SettingsBuilder::new()
|
||||
.project_out_directory(project_dir)
|
||||
.package_settings(PackageSettings {
|
||||
product_name: "TestApp".into(),
|
||||
version: "0.1.0".into(),
|
||||
description: "test".into(),
|
||||
homepage: None,
|
||||
authors: None,
|
||||
default_run: None,
|
||||
})
|
||||
.bundle_settings(BundleSettings {
|
||||
macos: macos_settings,
|
||||
..Default::default()
|
||||
})
|
||||
.target("x86_64-apple-darwin".into())
|
||||
.build()
|
||||
.expect("failed to build settings");
|
||||
|
||||
let bundle_dir = project_dir.join("TestApp.app/Contents");
|
||||
fs::create_dir_all(&bundle_dir).expect("failed to create bundle dir");
|
||||
|
||||
(bundle_dir, settings)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_copy_custom_file_to_bundle_file() {
|
||||
let tmp_dir = tempfile::tempdir().expect("failed to create temp dir");
|
||||
|
||||
// Prepare a single file to copy.
|
||||
let src_file = tmp_dir.path().join("sample.txt");
|
||||
fs::write(&src_file, b"hello tauri").expect("failed to write sample file");
|
||||
|
||||
let files_map = HashMap::from([(PathBuf::from("Resources/sample.txt"), src_file.clone())]);
|
||||
|
||||
let (bundle_dir, settings) = create_test_bundle(tmp_dir.path(), files_map);
|
||||
|
||||
copy_custom_files_to_bundle(&bundle_dir, &settings)
|
||||
.expect("copy_custom_files_to_bundle failed");
|
||||
|
||||
let dest_file = bundle_dir.join("Resources/sample.txt");
|
||||
assert!(dest_file.exists() && dest_file.is_file());
|
||||
assert_eq!(fs::read_to_string(dest_file).unwrap(), "hello tauri");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_copy_custom_file_to_bundle_dir() {
|
||||
let tmp_dir = tempfile::tempdir().expect("failed to create temp dir");
|
||||
|
||||
// Create a source directory with a nested file.
|
||||
let src_dir = tmp_dir.path().join("assets");
|
||||
fs::create_dir_all(&src_dir).expect("failed to create assets directory");
|
||||
let nested_file = src_dir.join("nested.txt");
|
||||
fs::write(&nested_file, b"nested").expect("failed to write nested file");
|
||||
|
||||
let files_map = HashMap::from([(PathBuf::from("MyAssets"), src_dir.clone())]);
|
||||
|
||||
let (bundle_dir, settings) = create_test_bundle(tmp_dir.path(), files_map);
|
||||
|
||||
copy_custom_files_to_bundle(&bundle_dir, &settings)
|
||||
.expect("copy_custom_files_to_bundle failed");
|
||||
|
||||
let dest_nested_file = bundle_dir.join("MyAssets/nested.txt");
|
||||
assert!(
|
||||
dest_nested_file.exists(),
|
||||
"{dest_nested_file:?} does not exist"
|
||||
);
|
||||
assert!(
|
||||
dest_nested_file.is_file(),
|
||||
"{dest_nested_file:?} is not a file"
|
||||
);
|
||||
assert_eq!(
|
||||
fs::read_to_string(dest_nested_file).unwrap().trim(),
|
||||
"nested"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_copy_custom_files_to_bundle_missing_source() {
|
||||
let tmp_dir = tempfile::tempdir().expect("failed to create temp dir");
|
||||
|
||||
// Intentionally reference a non-existent path.
|
||||
let missing_path = tmp_dir.path().join("does_not_exist.txt");
|
||||
|
||||
let files_map = HashMap::from([(PathBuf::from("Missing.txt"), missing_path)]);
|
||||
|
||||
let (bundle_dir, settings) = create_test_bundle(tmp_dir.path(), files_map);
|
||||
|
||||
let result = copy_custom_files_to_bundle(&bundle_dir, &settings);
|
||||
|
||||
assert!(result.is_err());
|
||||
assert!(result.err().unwrap().to_string().contains("does not exist"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_copy_custom_files_to_bundle_invalid_source() {
|
||||
let tmp_dir = tempfile::tempdir().expect("failed to create temp dir");
|
||||
|
||||
let files_map = HashMap::from([(PathBuf::from("Invalid.txt"), PathBuf::from("///"))]);
|
||||
|
||||
let (bundle_dir, settings) = create_test_bundle(tmp_dir.path(), files_map);
|
||||
|
||||
let result = copy_custom_files_to_bundle(&bundle_dir, &settings);
|
||||
assert!(result.is_err());
|
||||
assert!(result
|
||||
.err()
|
||||
.unwrap()
|
||||
.to_string()
|
||||
.contains("Failed to copy directory"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_copy_custom_files_to_bundle_dev_null() {
|
||||
let tmp_dir = tempfile::tempdir().expect("failed to create temp dir");
|
||||
|
||||
let files_map = HashMap::from([(PathBuf::from("Invalid.txt"), PathBuf::from("/dev/null"))]);
|
||||
|
||||
let (bundle_dir, settings) = create_test_bundle(tmp_dir.path(), files_map);
|
||||
|
||||
let result = copy_custom_files_to_bundle(&bundle_dir, &settings);
|
||||
assert!(result.is_err());
|
||||
assert!(result
|
||||
.err()
|
||||
.unwrap()
|
||||
.to_string()
|
||||
.contains("is not a file or directory."));
|
||||
}
|
||||
}
|
||||
|
||||
@ -6,12 +6,11 @@
|
||||
use super::{app, icon::create_icns_file};
|
||||
use crate::{
|
||||
bundle::{settings::Arch, Bundle},
|
||||
error::{Context, ErrorExt},
|
||||
utils::CommandExt,
|
||||
PackageType, Settings,
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
|
||||
use std::{
|
||||
env,
|
||||
fs::{self, write},
|
||||
@ -68,10 +67,9 @@ pub fn bundle_project(settings: &Settings, bundles: &[Bundle]) -> crate::Result<
|
||||
|
||||
for path in &[&support_directory_path, &output_path] {
|
||||
if path.exists() {
|
||||
fs::remove_dir_all(path).with_context(|| format!("Failed to remove old {dmg_name}"))?;
|
||||
fs::remove_dir_all(path).fs_context("failed to remove old dmg", path.to_path_buf())?;
|
||||
}
|
||||
fs::create_dir_all(path)
|
||||
.with_context(|| format!("Failed to create output directory at {path:?}"))?;
|
||||
fs::create_dir_all(path).fs_context("failed to create output directory", path.to_path_buf())?;
|
||||
}
|
||||
|
||||
// create paths for script
|
||||
|
||||
@ -14,11 +14,11 @@
|
||||
// explanation.
|
||||
|
||||
use crate::{
|
||||
error::{Context, ErrorExt},
|
||||
utils::{self, fs_utils},
|
||||
Settings,
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use image::{codecs::png::PngDecoder, GenericImageView, ImageDecoder};
|
||||
|
||||
use std::{
|
||||
@ -45,10 +45,10 @@ pub fn bundle_project(settings: &Settings) -> crate::Result<Vec<PathBuf>> {
|
||||
|
||||
if app_bundle_path.exists() {
|
||||
fs::remove_dir_all(&app_bundle_path)
|
||||
.with_context(|| format!("Failed to remove old {app_product_name}"))?;
|
||||
.fs_context("failed to remove old app bundle", &app_bundle_path)?;
|
||||
}
|
||||
fs::create_dir_all(&app_bundle_path)
|
||||
.with_context(|| format!("Failed to create bundle directory at {app_bundle_path:?}"))?;
|
||||
.fs_context("failed to create bundle directory", &app_bundle_path)?;
|
||||
|
||||
for src in settings.resource_files() {
|
||||
let src = src?;
|
||||
|
||||
@ -6,10 +6,10 @@
|
||||
use std::{
|
||||
env::{var, var_os},
|
||||
ffi::OsString,
|
||||
path::{Path, PathBuf},
|
||||
path::PathBuf,
|
||||
};
|
||||
|
||||
use crate::Settings;
|
||||
use crate::{error::NotarizeAuthError, Entitlements, Settings};
|
||||
|
||||
pub struct SignTarget {
|
||||
pub path: PathBuf,
|
||||
@ -23,11 +23,14 @@ pub fn keychain(identity: Option<&str>) -> crate::Result<Option<tauri_macos_sign
|
||||
) {
|
||||
// import user certificate - useful for CI build
|
||||
let keychain =
|
||||
tauri_macos_sign::Keychain::with_certificate(&certificate_encoded, &certificate_password)?;
|
||||
tauri_macos_sign::Keychain::with_certificate(&certificate_encoded, &certificate_password)
|
||||
.map_err(Box::new)?;
|
||||
if let Some(identity) = identity {
|
||||
let certificate_identity = keychain.signing_identity();
|
||||
if !certificate_identity.contains(identity) {
|
||||
return Err(anyhow::anyhow!("certificate from APPLE_CERTIFICATE \"{certificate_identity}\" environment variable does not match provided identity \"{identity}\"").into());
|
||||
return Err(crate::Error::GenericError(format!(
|
||||
"certificate from APPLE_CERTIFICATE \"{certificate_identity}\" environment variable does not match provided identity \"{identity}\""
|
||||
)));
|
||||
}
|
||||
}
|
||||
Ok(Some(keychain))
|
||||
@ -48,16 +51,23 @@ pub fn sign(
|
||||
log::info!(action = "Signing"; "with identity \"{}\"", keychain.signing_identity());
|
||||
|
||||
for target in targets {
|
||||
let entitlements_path = if target.is_an_executable {
|
||||
settings.macos().entitlements.as_ref().map(Path::new)
|
||||
} else {
|
||||
None
|
||||
let (entitlements_path, _temp_file) = match settings.macos().entitlements.as_ref() {
|
||||
Some(Entitlements::Path(path)) => (Some(path.to_owned()), None),
|
||||
Some(Entitlements::Plist(plist)) => {
|
||||
let mut temp_file = tempfile::NamedTempFile::new()?;
|
||||
plist::to_writer_xml(temp_file.as_file_mut(), &plist)?;
|
||||
(Some(temp_file.path().to_path_buf()), Some(temp_file))
|
||||
}
|
||||
None => (None, None),
|
||||
};
|
||||
keychain.sign(
|
||||
&target.path,
|
||||
entitlements_path,
|
||||
target.is_an_executable && settings.macos().hardened_runtime,
|
||||
)?;
|
||||
|
||||
keychain
|
||||
.sign(
|
||||
&target.path,
|
||||
entitlements_path.as_deref(),
|
||||
target.is_an_executable && settings.macos().hardened_runtime,
|
||||
)
|
||||
.map_err(Box::new)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@ -68,7 +78,9 @@ pub fn notarize(
|
||||
app_bundle_path: PathBuf,
|
||||
credentials: &tauri_macos_sign::AppleNotarizationCredentials,
|
||||
) -> crate::Result<()> {
|
||||
tauri_macos_sign::notarize(keychain, &app_bundle_path, credentials).map_err(Into::into)
|
||||
tauri_macos_sign::notarize(keychain, &app_bundle_path, credentials)
|
||||
.map_err(Box::new)
|
||||
.map_err(Into::into)
|
||||
}
|
||||
|
||||
pub fn notarize_without_stapling(
|
||||
@ -77,19 +89,10 @@ pub fn notarize_without_stapling(
|
||||
credentials: &tauri_macos_sign::AppleNotarizationCredentials,
|
||||
) -> crate::Result<()> {
|
||||
tauri_macos_sign::notarize_without_stapling(keychain, &app_bundle_path, credentials)
|
||||
.map_err(Box::new)
|
||||
.map_err(Into::into)
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum NotarizeAuthError {
|
||||
#[error(
|
||||
"The team ID is now required for notarization with app-specific password as authentication. Please set the `APPLE_TEAM_ID` environment variable. You can find the team ID in https://developer.apple.com/account#MembershipDetailsCard."
|
||||
)]
|
||||
MissingTeamId,
|
||||
#[error(transparent)]
|
||||
Anyhow(#[from] anyhow::Error),
|
||||
}
|
||||
|
||||
pub fn notarize_auth() -> Result<tauri_macos_sign::AppleNotarizationCredentials, NotarizeAuthError>
|
||||
{
|
||||
match (
|
||||
@ -106,10 +109,18 @@ pub fn notarize_auth() -> Result<tauri_macos_sign::AppleNotarizationCredentials,
|
||||
}
|
||||
(Some(_apple_id), Some(_password), None) => Err(NotarizeAuthError::MissingTeamId),
|
||||
_ => {
|
||||
match (var_os("APPLE_API_KEY"), var_os("APPLE_API_ISSUER"), var("APPLE_API_KEY_PATH")) {
|
||||
match (
|
||||
var_os("APPLE_API_KEY"),
|
||||
var_os("APPLE_API_ISSUER"),
|
||||
var("APPLE_API_KEY_PATH"),
|
||||
) {
|
||||
(Some(key_id), Some(issuer), Ok(key_path)) => {
|
||||
Ok(tauri_macos_sign::AppleNotarizationCredentials::ApiKey { key_id, key: tauri_macos_sign::ApiKey::Path( key_path.into()), issuer })
|
||||
},
|
||||
Ok(tauri_macos_sign::AppleNotarizationCredentials::ApiKey {
|
||||
key_id,
|
||||
key: tauri_macos_sign::ApiKey::Path(key_path.into()),
|
||||
issuer,
|
||||
})
|
||||
}
|
||||
(Some(key_id), Some(issuer), Err(_)) => {
|
||||
let mut api_key_file_name = OsString::from("AuthKey_");
|
||||
api_key_file_name.push(&key_id);
|
||||
@ -131,12 +142,18 @@ pub fn notarize_auth() -> Result<tauri_macos_sign::AppleNotarizationCredentials,
|
||||
}
|
||||
|
||||
if let Some(key_path) = key_path {
|
||||
Ok(tauri_macos_sign::AppleNotarizationCredentials::ApiKey { key_id, key: tauri_macos_sign::ApiKey::Path(key_path), issuer })
|
||||
Ok(tauri_macos_sign::AppleNotarizationCredentials::ApiKey {
|
||||
key_id,
|
||||
key: tauri_macos_sign::ApiKey::Path(key_path),
|
||||
issuer,
|
||||
})
|
||||
} else {
|
||||
Err(anyhow::anyhow!("could not find API key file. Please set the APPLE_API_KEY_PATH environment variables to the path to the {api_key_file_name:?} file").into())
|
||||
Err(NotarizeAuthError::MissingApiKey {
|
||||
file_name: api_key_file_name.to_string_lossy().into_owned(),
|
||||
})
|
||||
}
|
||||
}
|
||||
_ => Err(anyhow::anyhow!("no APPLE_ID & APPLE_PASSWORD & APPLE_TEAM_ID or APPLE_API_KEY & APPLE_API_ISSUER & APPLE_API_KEY_PATH environment variables found").into())
|
||||
_ => Err(NotarizeAuthError::MissingCredentials),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -4,8 +4,7 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use super::category::AppCategory;
|
||||
use crate::{bundle::platform::target_triple, utils::fs_utils};
|
||||
use anyhow::Context;
|
||||
use crate::{bundle::platform::target_triple, error::Context, utils::fs_utils};
|
||||
pub use tauri_utils::config::WebviewInstallMode;
|
||||
use tauri_utils::{
|
||||
config::{
|
||||
@ -361,10 +360,28 @@ pub struct MacOsSettings {
|
||||
pub hardened_runtime: bool,
|
||||
/// Provider short name for notarization.
|
||||
pub provider_short_name: Option<String>,
|
||||
/// Path or contents of the entitlements.plist file.
|
||||
pub entitlements: Option<Entitlements>,
|
||||
/// Path to the Info.plist file or raw plist value to merge with the bundle Info.plist.
|
||||
pub info_plist: Option<PlistKind>,
|
||||
}
|
||||
|
||||
/// Entitlements for macOS code signing.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Entitlements {
|
||||
/// Path to the entitlements.plist file.
|
||||
pub entitlements: Option<String>,
|
||||
/// Path to the Info.plist file for the bundle.
|
||||
pub info_plist_path: Option<PathBuf>,
|
||||
Path(PathBuf),
|
||||
/// Raw plist::Value.
|
||||
Plist(plist::Value),
|
||||
}
|
||||
|
||||
/// Plist format.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum PlistKind {
|
||||
/// Path to a .plist file.
|
||||
Path(PathBuf),
|
||||
/// Raw plist value.
|
||||
Plist(plist::Value),
|
||||
}
|
||||
|
||||
/// Configuration for a target language for the WiX build.
|
||||
@ -969,7 +986,6 @@ impl Settings {
|
||||
.iter()
|
||||
.find(|bin| bin.main)
|
||||
.context("failed to find main binary, make sure you have a `package > default-run` in the Cargo.toml file")
|
||||
.map_err(Into::into)
|
||||
}
|
||||
|
||||
/// Returns the file name of the binary being bundled.
|
||||
@ -979,7 +995,6 @@ impl Settings {
|
||||
.iter_mut()
|
||||
.find(|bin| bin.main)
|
||||
.context("failed to find main binary, make sure you have a `package > default-run` in the Cargo.toml file")
|
||||
.map_err(Into::into)
|
||||
}
|
||||
|
||||
/// Returns the file name of the binary being bundled.
|
||||
@ -990,7 +1005,6 @@ impl Settings {
|
||||
.find(|bin| bin.main)
|
||||
.context("failed to find main binary, make sure you have a `package > default-run` in the Cargo.toml file")
|
||||
.map(|b| b.name())
|
||||
.map_err(Into::into)
|
||||
}
|
||||
|
||||
/// Returns the path to the specified binary.
|
||||
|
||||
@ -11,6 +11,7 @@ use crate::{
|
||||
},
|
||||
Bundle,
|
||||
},
|
||||
error::{Context, ErrorExt},
|
||||
utils::fs_utils,
|
||||
Settings,
|
||||
};
|
||||
@ -22,7 +23,6 @@ use std::{
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use zip::write::SimpleFileOptions;
|
||||
|
||||
// Build update
|
||||
@ -216,7 +216,9 @@ pub fn create_zip(src_file: &Path, dst_file: &Path) -> crate::Result<PathBuf> {
|
||||
.unix_permissions(0o755);
|
||||
|
||||
zip.start_file(file_name.to_string_lossy(), options)?;
|
||||
let mut f = File::open(src_file)?;
|
||||
let mut f =
|
||||
File::open(src_file).fs_context("failed to open updater ZIP file", src_file.to_path_buf())?;
|
||||
|
||||
let mut buffer = Vec::new();
|
||||
f.read_to_end(&mut buffer)?;
|
||||
zip.write_all(&buffer)?;
|
||||
|
||||
@ -14,13 +14,13 @@ use crate::{
|
||||
},
|
||||
},
|
||||
},
|
||||
error::Context,
|
||||
utils::{
|
||||
fs_utils::copy_file,
|
||||
http_utils::{download_and_verify, extract_zip, HashAlgorithm},
|
||||
CommandExt,
|
||||
},
|
||||
};
|
||||
use anyhow::{bail, Context};
|
||||
use handlebars::{html_escape, to_json, Handlebars};
|
||||
use regex::Regex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@ -279,37 +279,40 @@ fn clear_env_for_wix(cmd: &mut Command) {
|
||||
}
|
||||
}
|
||||
|
||||
fn validate_wix_version(version_str: &str) -> anyhow::Result<()> {
|
||||
fn validate_wix_version(version_str: &str) -> crate::Result<()> {
|
||||
let components = version_str
|
||||
.split('.')
|
||||
.flat_map(|c| c.parse::<u64>().ok())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
anyhow::ensure!(
|
||||
components.len() >= 3,
|
||||
"app wix version should be in the format major.minor.patch.build (build is optional)"
|
||||
);
|
||||
if components.len() < 3 {
|
||||
crate::error::bail!(
|
||||
"app wix version should be in the format major.minor.patch.build (build is optional)"
|
||||
);
|
||||
}
|
||||
|
||||
if components[0] > 255 {
|
||||
bail!("app version major number cannot be greater than 255");
|
||||
crate::error::bail!("app version major number cannot be greater than 255");
|
||||
}
|
||||
if components[1] > 255 {
|
||||
bail!("app version minor number cannot be greater than 255");
|
||||
crate::error::bail!("app version minor number cannot be greater than 255");
|
||||
}
|
||||
if components[2] > 65535 {
|
||||
bail!("app version patch number cannot be greater than 65535");
|
||||
crate::error::bail!("app version patch number cannot be greater than 65535");
|
||||
}
|
||||
|
||||
if components.len() == 4 && components[3] > 65535 {
|
||||
bail!("app version build number cannot be greater than 65535");
|
||||
crate::error::bail!("app version build number cannot be greater than 65535");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// WiX requires versions to be numeric only in a `major.minor.patch.build` format
|
||||
fn convert_version(version_str: &str) -> anyhow::Result<String> {
|
||||
let version = semver::Version::parse(version_str).context("invalid app version")?;
|
||||
fn convert_version(version_str: &str) -> crate::Result<String> {
|
||||
let version = semver::Version::parse(version_str)
|
||||
.map_err(Into::into)
|
||||
.context("invalid app version")?;
|
||||
if !version.build.is_empty() {
|
||||
let build = version.build.parse::<u64>();
|
||||
if build.map(|b| b <= 65535).unwrap_or_default() {
|
||||
@ -318,7 +321,7 @@ fn convert_version(version_str: &str) -> anyhow::Result<String> {
|
||||
version.major, version.minor, version.patch, version.build
|
||||
));
|
||||
} else {
|
||||
bail!("optional build metadata in app version must be numeric-only and cannot be greater than 65535 for msi target");
|
||||
crate::error::bail!("optional build metadata in app version must be numeric-only and cannot be greater than 65535 for msi target");
|
||||
}
|
||||
}
|
||||
|
||||
@ -330,7 +333,7 @@ fn convert_version(version_str: &str) -> anyhow::Result<String> {
|
||||
version.major, version.minor, version.patch, version.pre
|
||||
));
|
||||
} else {
|
||||
bail!("optional pre-release identifier in app version must be numeric-only and cannot be greater than 65535 for msi target");
|
||||
crate::error::bail!("optional pre-release identifier in app version must be numeric-only and cannot be greater than 65535 for msi target");
|
||||
}
|
||||
}
|
||||
|
||||
@ -387,11 +390,7 @@ fn run_candle(
|
||||
cmd.arg(ext);
|
||||
}
|
||||
clear_env_for_wix(&mut cmd);
|
||||
cmd
|
||||
.args(&args)
|
||||
.current_dir(cwd)
|
||||
.output_ok()
|
||||
.context("error running candle.exe")?;
|
||||
cmd.args(&args).current_dir(cwd).output_ok()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@ -416,11 +415,7 @@ fn run_light(
|
||||
cmd.arg(ext);
|
||||
}
|
||||
clear_env_for_wix(&mut cmd);
|
||||
cmd
|
||||
.args(&args)
|
||||
.current_dir(build_path)
|
||||
.output_ok()
|
||||
.context("error running light.exe")?;
|
||||
cmd.args(&args).current_dir(build_path).output_ok()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@ -472,8 +467,7 @@ pub fn build_wix_app_installer(
|
||||
// when we're performing code signing, we'll sign some WiX DLLs, so we make a local copy
|
||||
let wix_toolset_path = if settings.windows().can_sign() {
|
||||
let wix_path = output_path.join("wix");
|
||||
crate::utils::fs_utils::copy_dir(wix_toolset_path, &wix_path)
|
||||
.context("failed to copy wix directory")?;
|
||||
crate::utils::fs_utils::copy_dir(wix_toolset_path, &wix_path)?;
|
||||
wix_path
|
||||
} else {
|
||||
wix_toolset_path.to_path_buf()
|
||||
@ -703,7 +697,9 @@ pub fn build_wix_app_installer(
|
||||
.iter()
|
||||
.flat_map(|p| &p.schemes)
|
||||
.collect::<Vec<_>>();
|
||||
data.insert("deep_link_protocols", to_json(schemes));
|
||||
if !schemes.is_empty() {
|
||||
data.insert("deep_link_protocols", to_json(schemes));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(path) = custom_template_path {
|
||||
@ -757,26 +753,28 @@ pub fn build_wix_app_installer(
|
||||
}
|
||||
|
||||
let main_wxs_path = output_path.join("main.wxs");
|
||||
fs::write(main_wxs_path, handlebars.render("main.wxs", &data)?)?;
|
||||
fs::write(&main_wxs_path, handlebars.render("main.wxs", &data)?)?;
|
||||
|
||||
let mut candle_inputs = vec![("main.wxs".into(), Vec::new())];
|
||||
let mut candle_inputs = vec![];
|
||||
|
||||
let current_dir = std::env::current_dir()?;
|
||||
let extension_regex = Regex::new("\"http://schemas.microsoft.com/wix/(\\w+)\"")?;
|
||||
for fragment_path in fragment_paths {
|
||||
let fragment_path = current_dir.join(fragment_path);
|
||||
let fragment_content = fs::read_to_string(&fragment_path)?;
|
||||
let fragment_handlebars = Handlebars::new();
|
||||
let fragment = fragment_handlebars.render_template(&fragment_content, &data)?;
|
||||
let input_paths =
|
||||
std::iter::once(main_wxs_path).chain(fragment_paths.iter().map(|p| current_dir.join(p)));
|
||||
|
||||
for input_path in input_paths {
|
||||
let input_content = fs::read_to_string(&input_path)?;
|
||||
let input_handlebars = Handlebars::new();
|
||||
let input = input_handlebars.render_template(&input_content, &data)?;
|
||||
let mut extensions = Vec::new();
|
||||
for cap in extension_regex.captures_iter(&fragment) {
|
||||
for cap in extension_regex.captures_iter(&input) {
|
||||
let path = wix_toolset_path.join(format!("Wix{}.dll", &cap[1]));
|
||||
if settings.windows().can_sign() {
|
||||
try_sign(&path, settings)?;
|
||||
}
|
||||
extensions.push(path);
|
||||
}
|
||||
candle_inputs.push((fragment_path, extensions));
|
||||
candle_inputs.push((input_path, extensions));
|
||||
}
|
||||
|
||||
let mut fragment_extensions = HashSet::new();
|
||||
|
||||
@ -13,15 +13,16 @@ use crate::{
|
||||
},
|
||||
},
|
||||
},
|
||||
error::ErrorExt,
|
||||
utils::{
|
||||
http_utils::{download_and_verify, verify_file_hash, HashAlgorithm},
|
||||
CommandExt,
|
||||
},
|
||||
Settings,
|
||||
Error, Settings,
|
||||
};
|
||||
use tauri_utils::display_path;
|
||||
|
||||
use anyhow::Context;
|
||||
use crate::error::Context;
|
||||
use handlebars::{to_json, Handlebars};
|
||||
use tauri_utils::config::{NSISInstallerMode, NsisCompression, WebviewInstallMode};
|
||||
|
||||
@ -35,12 +36,12 @@ use std::{
|
||||
// URLS for the NSIS toolchain.
|
||||
#[cfg(target_os = "windows")]
|
||||
const NSIS_URL: &str =
|
||||
"https://github.com/tauri-apps/binary-releases/releases/download/nsis-3/nsis-3.zip";
|
||||
"https://github.com/tauri-apps/binary-releases/releases/download/nsis-3.11/nsis-3.11.zip";
|
||||
#[cfg(target_os = "windows")]
|
||||
const NSIS_SHA1: &str = "057e83c7d82462ec394af76c87d06733605543d4";
|
||||
const NSIS_SHA1: &str = "EF7FF767E5CBD9EDD22ADD3A32C9B8F4500BB10D";
|
||||
const NSIS_TAURI_UTILS_URL: &str =
|
||||
"https://github.com/tauri-apps/nsis-tauri-utils/releases/download/nsis_tauri_utils-v0.5.1/nsis_tauri_utils.dll";
|
||||
const NSIS_TAURI_UTILS_SHA1: &str = "B053B2E5FDB97257954C8F935D80964F056520AE";
|
||||
"https://github.com/tauri-apps/nsis-tauri-utils/releases/download/nsis_tauri_utils-v0.5.2/nsis_tauri_utils.dll";
|
||||
const NSIS_TAURI_UTILS_SHA1: &str = "D0C502F45DF55C0465C9406088FF016C2E7E6817";
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
const NSIS_REQUIRED_FILES: &[&str] = &[
|
||||
@ -54,6 +55,9 @@ const NSIS_REQUIRED_FILES: &[&str] = &[
|
||||
"Include/x64.nsh",
|
||||
"Include/nsDialogs.nsh",
|
||||
"Include/WinMessages.nsh",
|
||||
"Include/Win/COM.nsh",
|
||||
"Include/Win/Propkey.nsh",
|
||||
"Include/Win/RestartManager.nsh",
|
||||
];
|
||||
const NSIS_PLUGIN_FILES: &[&str] = &[
|
||||
"NSISdl.dll",
|
||||
@ -105,8 +109,9 @@ pub fn bundle_project(settings: &Settings, updater: bool) -> crate::Result<Vec<P
|
||||
let data = download_and_verify(url, hash, *hash_algorithm)?;
|
||||
let out_path = nsis_toolset_path.join(path);
|
||||
std::fs::create_dir_all(out_path.parent().context("output path has no parent")?)
|
||||
.context("failed to create file output directory")?;
|
||||
fs::write(out_path, data).with_context(|| format!("failed to save {path}"))?;
|
||||
.fs_context("failed to create file output directory", out_path.clone())?;
|
||||
fs::write(&out_path, data)
|
||||
.fs_context("failed to save NSIS downloaded file", out_path.clone())?;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -123,7 +128,7 @@ fn get_and_extract_nsis(nsis_toolset_path: &Path, _tauri_tools_path: &Path) -> c
|
||||
let data = download_and_verify(NSIS_URL, NSIS_SHA1, HashAlgorithm::Sha1)?;
|
||||
log::info!("extracting NSIS");
|
||||
crate::utils::http_utils::extract_zip(&data, _tauri_tools_path)?;
|
||||
fs::rename(_tauri_tools_path.join("nsis-3.08"), nsis_toolset_path)?;
|
||||
fs::rename(_tauri_tools_path.join("nsis-3.11"), nsis_toolset_path)?;
|
||||
}
|
||||
|
||||
// download additional plugins
|
||||
@ -142,8 +147,9 @@ fn get_and_extract_nsis(nsis_toolset_path: &Path, _tauri_tools_path: &Path) -> c
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn try_add_numeric_build_number(version_str: &str) -> anyhow::Result<String> {
|
||||
let version = semver::Version::parse(version_str).context("invalid app version")?;
|
||||
fn try_add_numeric_build_number(version_str: &str) -> crate::Result<String> {
|
||||
let version = semver::Version::parse(version_str)
|
||||
.map_err(|error| Error::GenericError(format!("invalid app version: {error}")))?;
|
||||
if !version.build.is_empty() {
|
||||
let build = version.build.parse::<u64>();
|
||||
if build.is_ok() {
|
||||
@ -199,31 +205,39 @@ fn build_nsis_app_installer(
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|| PathBuf::from("/usr/share/nsis"));
|
||||
#[cfg(target_os = "macos")]
|
||||
let system_nsis_toolset_path = std::env::var_os("NSIS_PATH")
|
||||
.map(PathBuf::from)
|
||||
.ok_or_else(|| anyhow::anyhow!("failed to resolve NSIS path"))
|
||||
.or_else(|_| {
|
||||
let mut makensis_path =
|
||||
which::which("makensis").context("failed to resolve `makensis`; did you install nsis? See https://tauri.app/distribute/windows-installer/#install-nsis for more information")?;
|
||||
// homebrew installs it as a symlink
|
||||
if makensis_path.is_symlink() {
|
||||
// read_link might return a path relative to makensis_path so we must use join() and canonicalize
|
||||
makensis_path = makensis_path
|
||||
.parent()
|
||||
.context("missing makensis parent")?
|
||||
.join(std::fs::read_link(&makensis_path).context("failed to resolve makensis symlink")?)
|
||||
.canonicalize()
|
||||
.context("failed to resolve makensis path")?;
|
||||
}
|
||||
// file structure:
|
||||
// ├── bin
|
||||
// │ ├── makensis
|
||||
// ├── share
|
||||
// │ ├── nsis
|
||||
let bin_folder = makensis_path.parent().context("missing makensis parent")?;
|
||||
let root_folder = bin_folder.parent().context("missing makensis root")?;
|
||||
crate::Result::Ok(root_folder.join("share").join("nsis"))
|
||||
let system_nsis_toolset_path = std::env::var_os("NSIS_PATH")
|
||||
.map(PathBuf::from)
|
||||
.context("failed to resolve NSIS path")
|
||||
.or_else(|_| {
|
||||
let mut makensis_path = which::which("makensis").map_err(|error| Error::CommandFailed {
|
||||
command: "makensis".to_string(),
|
||||
error: std::io::Error::other(format!("failed to find makensis: {error}")),
|
||||
})?;
|
||||
// homebrew installs it as a symlink
|
||||
if makensis_path.is_symlink() {
|
||||
// read_link might return a path relative to makensis_path so we must use join() and canonicalize
|
||||
makensis_path = makensis_path
|
||||
.parent()
|
||||
.context("missing makensis parent")?
|
||||
.join(
|
||||
std::fs::read_link(&makensis_path)
|
||||
.fs_context("failed to resolve makensis symlink", makensis_path.clone())?,
|
||||
)
|
||||
.canonicalize()
|
||||
.fs_context(
|
||||
"failed to canonicalize makensis path",
|
||||
makensis_path.clone(),
|
||||
)?;
|
||||
}
|
||||
// file structure:
|
||||
// ├── bin
|
||||
// │ ├── makensis
|
||||
// ├── share
|
||||
// │ ├── nsis
|
||||
let bin_folder = makensis_path.parent().context("missing makensis parent")?;
|
||||
let root_folder = bin_folder.parent().context("missing makensis root")?;
|
||||
crate::Result::Ok(root_folder.join("share").join("nsis"))
|
||||
})?;
|
||||
#[cfg(windows)]
|
||||
let system_nsis_toolset_path = nsis_toolset_path.to_path_buf();
|
||||
|
||||
@ -284,8 +298,12 @@ fn build_nsis_app_installer(
|
||||
data.insert("copyright", to_json(settings.copyright_string()));
|
||||
|
||||
if settings.windows().can_sign() {
|
||||
let sign_cmd = format!("{:?}", sign_command("%1", &settings.sign_params())?);
|
||||
data.insert("uninstaller_sign_cmd", to_json(sign_cmd));
|
||||
if settings.no_sign() {
|
||||
log::warn!("Skipping signing for NSIS uninstaller due to --no-sign flag.");
|
||||
} else {
|
||||
let sign_cmd = format!("{:?}", sign_command("%1", &settings.sign_params())?);
|
||||
data.insert("uninstaller_sign_cmd", to_json(sign_cmd));
|
||||
}
|
||||
}
|
||||
|
||||
let version = settings.version_string();
|
||||
@ -484,7 +502,9 @@ fn build_nsis_app_installer(
|
||||
.iter()
|
||||
.flat_map(|p| &p.schemes)
|
||||
.collect::<Vec<_>>();
|
||||
data.insert("deep_link_protocols", to_json(schemes));
|
||||
if !schemes.is_empty() {
|
||||
data.insert("deep_link_protocols", to_json(schemes));
|
||||
}
|
||||
}
|
||||
|
||||
let silent_webview2_install = if let WebviewInstallMode::DownloadBootstrapper { silent }
|
||||
@ -601,13 +621,16 @@ fn build_nsis_app_installer(
|
||||
fs::create_dir_all(nsis_installer_path.parent().unwrap())?;
|
||||
|
||||
if settings.windows().can_sign() {
|
||||
log::info!("Signing NSIS plugins");
|
||||
for dll in NSIS_PLUGIN_FILES {
|
||||
let path = additional_plugins_path.join(dll);
|
||||
if path.exists() {
|
||||
try_sign(&path, settings)?;
|
||||
} else {
|
||||
log::warn!("Could not find {}, skipping signing", path.display());
|
||||
if let Some(plugin_copy_path) = &maybe_plugin_copy_path {
|
||||
let plugin_copy_path = plugin_copy_path.join("x86-unicode");
|
||||
log::info!("Signing NSIS plugins");
|
||||
for dll in NSIS_PLUGIN_FILES {
|
||||
let path = plugin_copy_path.join(dll);
|
||||
if path.exists() {
|
||||
try_sign(&path, settings)?;
|
||||
} else {
|
||||
log::warn!("Could not find {}, skipping signing", path.display());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -636,7 +659,10 @@ fn build_nsis_app_installer(
|
||||
.env_remove("NSISCONFDIR")
|
||||
.current_dir(output_path)
|
||||
.piped()
|
||||
.context("error running makensis.exe")?;
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "makensis.exe".to_string(),
|
||||
error,
|
||||
})?;
|
||||
|
||||
fs::rename(nsis_output_path, &nsis_installer_path)?;
|
||||
|
||||
@ -808,7 +834,11 @@ fn generate_estimated_size(
|
||||
.chain(resources.keys())
|
||||
{
|
||||
size += std::fs::metadata(k)
|
||||
.with_context(|| format!("when getting size of {}", k.display()))?
|
||||
.map_err(|error| Error::Fs {
|
||||
context: "when getting size of",
|
||||
path: k.to_path_buf(),
|
||||
error,
|
||||
})?
|
||||
.len();
|
||||
}
|
||||
Ok(size / 1024)
|
||||
|
||||
@ -48,6 +48,7 @@
|
||||
Pop $R0
|
||||
Sleep 500
|
||||
${If} $R0 = 0
|
||||
${OrIf} $R0 = 2
|
||||
Goto app_check_done_${UniqueID}
|
||||
${Else}
|
||||
IfSilent silent_${UniqueID} ui_${UniqueID}
|
||||
|
||||
@ -266,8 +266,7 @@ pub fn try_sign<P: AsRef<Path>>(file_path: P, settings: &Settings) -> crate::Res
|
||||
pub fn should_sign(file_path: &Path) -> crate::Result<bool> {
|
||||
let is_binary = file_path
|
||||
.extension()
|
||||
.and_then(|extension| extension.to_str())
|
||||
.is_some_and(|extension| matches!(extension, "exe" | "dll"));
|
||||
.is_some_and(|ext| ext == "exe" || ext == "dll");
|
||||
if !is_binary {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
@ -27,17 +27,14 @@ pub fn webview2_guid_path(url: &str) -> crate::Result<(String, String)> {
|
||||
let response = agent.head(url).call().map_err(Box::new)?;
|
||||
let final_url = response.get_uri().to_string();
|
||||
let remaining_url = final_url.strip_prefix(WEBVIEW2_URL_PREFIX).ok_or_else(|| {
|
||||
anyhow::anyhow!(
|
||||
"WebView2 URL prefix mismatch. Expected `{}`, found `{}`.",
|
||||
WEBVIEW2_URL_PREFIX,
|
||||
final_url
|
||||
)
|
||||
crate::Error::GenericError(format!(
|
||||
"WebView2 URL prefix mismatch. Expected `{WEBVIEW2_URL_PREFIX}`, found `{final_url}`."
|
||||
))
|
||||
})?;
|
||||
let (guid, filename) = remaining_url.split_once('/').ok_or_else(|| {
|
||||
anyhow::anyhow!(
|
||||
"WebView2 URL format mismatch. Expected `<GUID>/<FILENAME>`, found `{}`.",
|
||||
remaining_url
|
||||
)
|
||||
crate::Error::GenericError(format!(
|
||||
"WebView2 URL format mismatch. Expected `<GUID>/<FILENAME>`, found `{remaining_url}`."
|
||||
))
|
||||
})?;
|
||||
Ok((guid.into(), filename.into()))
|
||||
}
|
||||
|
||||
@@ -3,17 +3,45 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT

use std::{io, num, path};
use std::{
fmt::Display,
io, num,
path::{self, PathBuf},
};
use thiserror::Error as DeriveError;

/// Errors returned by the bundler.
#[derive(Debug, DeriveError)]
#[non_exhaustive]
pub enum Error {
/// Error with context. Created by the [`Context`] trait.
#[error("{0}: {1}")]
Context(String, Box<Self>),
/// File system error.
#[error("{context} {path}: {error}")]
Fs {
/// Context of the error.
context: &'static str,
/// Path that was accessed.
path: PathBuf,
/// Error that occurred.
error: io::Error,
},
/// Child process error.
#[error("failed to run command {command}: {error}")]
CommandFailed {
/// Command that failed.
command: String,
/// Error that occurred.
error: io::Error,
},
/// Error running tauri_utils API.
#[error("{0}")]
Resource(#[from] tauri_utils::Error),
/// Bundler error.
///
/// This variant is no longer used as this crate no longer uses anyhow.
// TODO(v3): remove this variant
#[error("{0:#}")]
BundlerError(#[from] anyhow::Error),
/// I/O error.
@@ -71,7 +99,14 @@ pub enum Error {
#[error("Wrong package type {0} for platform {1}")]
InvalidPackageType(String, String),
/// Bundle type symbol missing in binary
#[error("__TAURI_BUNDLE_TYPE variable not found in binary. Make sure tauri crate and tauri-cli are up to date")]
#[cfg_attr(
target_os = "linux",
error("__TAURI_BUNDLE_TYPE variable not found in binary. Make sure tauri crate and tauri-cli are up to date and that symbol stripping is disabled (https://doc.rust-lang.org/cargo/reference/profiles.html#strip)")
)]
#[cfg_attr(
not(target_os = "linux"),
error("__TAURI_BUNDLE_TYPE variable not found in binary. Make sure tauri crate and tauri-cli are up to date")
)]
MissingBundleTypeVar,
/// Failed to write binary file changed
#[error("Failed to write binary file changes: `{0}`")]
@@ -133,7 +168,110 @@ pub enum Error {
#[cfg(target_os = "linux")]
#[error("{0}")]
RpmError(#[from] rpm::Error),
/// Failed to notarize application.
#[cfg(target_os = "macos")]
#[error("failed to notarize app: {0}")]
AppleNotarization(#[from] NotarizeAuthError),
/// Failed to codesign application.
#[cfg(target_os = "macos")]
#[error("failed codesign application: {0}")]
AppleCodesign(#[from] Box<tauri_macos_sign::Error>),
/// Handlebars template error.
#[error(transparent)]
Template(#[from] handlebars::TemplateError),
/// Semver error.
#[error("`{0}`")]
SemverError(#[from] semver::Error),
}

#[cfg(target_os = "macos")]
#[allow(clippy::enum_variant_names)]
#[derive(Debug, thiserror::Error)]
pub enum NotarizeAuthError {
#[error(
"The team ID is now required for notarization with app-specific password as authentication. Please set the `APPLE_TEAM_ID` environment variable. You can find the team ID in https://developer.apple.com/account#MembershipDetailsCard."
)]
MissingTeamId,
#[error("could not find API key file. Please set the APPLE_API_KEY_PATH environment variables to the path to the {file_name} file")]
MissingApiKey { file_name: String },
#[error("no APPLE_ID & APPLE_PASSWORD & APPLE_TEAM_ID or APPLE_API_KEY & APPLE_API_ISSUER & APPLE_API_KEY_PATH environment variables found")]
MissingCredentials,
}

/// Convenient type alias of Result type.
pub type Result<T> = std::result::Result<T, Error>;

pub trait Context<T> {
// Required methods
fn context<C>(self, context: C) -> Result<T>
where
C: Display + Send + Sync + 'static;
fn with_context<C, F>(self, f: F) -> Result<T>
where
C: Display + Send + Sync + 'static,
F: FnOnce() -> C;
}

impl<T> Context<T> for Result<T> {
fn context<C>(self, context: C) -> Result<T>
where
C: Display + Send + Sync + 'static,
{
self.map_err(|e| Error::Context(context.to_string(), Box::new(e)))
}

fn with_context<C, F>(self, f: F) -> Result<T>
where
C: Display + Send + Sync + 'static,
F: FnOnce() -> C,
{
self.map_err(|e| Error::Context(f().to_string(), Box::new(e)))
}
}

impl<T> Context<T> for Option<T> {
fn context<C>(self, context: C) -> Result<T>
where
C: Display + Send + Sync + 'static,
{
self.ok_or_else(|| Error::GenericError(context.to_string()))
}

fn with_context<C, F>(self, f: F) -> Result<T>
where
C: Display + Send + Sync + 'static,
F: FnOnce() -> C,
{
self.ok_or_else(|| Error::GenericError(f().to_string()))
}
}

pub trait ErrorExt<T> {
fn fs_context(self, context: &'static str, path: impl Into<PathBuf>) -> Result<T>;
}

impl<T> ErrorExt<T> for std::result::Result<T, std::io::Error> {
fn fs_context(self, context: &'static str, path: impl Into<PathBuf>) -> Result<T> {
self.map_err(|error| Error::Fs {
context,
path: path.into(),
error,
})
}
}

#[allow(unused)]
macro_rules! bail {
($msg:literal $(,)?) => {
return Err(crate::Error::GenericError($msg.into()))
};
($err:expr $(,)?) => {
return Err(crate::Error::GenericError($err))
};
($fmt:expr, $($arg:tt)*) => {
return Err(crate::Error::GenericError(format!($fmt, $($arg)*)))
};
}

#[allow(unused)]
pub(crate) use bail;

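A minimal usage sketch, not part of the diff, showing how the new `Context`, `ErrorExt::fs_context` and `bail!` helpers are meant to compose from inside the crate; the function, messages and paths are hypothetical.

```rust
// Hypothetical caller inside the bundler crate, assuming
// `use crate::error::{bail, Context, ErrorExt};` and the `Error`/`Result` types above.
fn read_non_empty_manifest(path: &std::path::Path) -> crate::Result<String> {
  // io::Error -> Error::Fs, carrying a static context string and the offending path.
  let contents = std::fs::read_to_string(path)
    .fs_context("failed to read manifest", path.to_path_buf())?;

  if contents.trim().is_empty() {
    // Expands to `return Err(crate::Error::GenericError(format!(...)))`.
    crate::error::bail!("manifest `{}` is empty", path.display());
  }

  // Option<T> -> Result<T> with a lazily built message, via the Context impl for Option.
  contents
    .lines()
    .next()
    .map(str::to_string)
    .with_context(|| format!("no first line in `{}`", path.display()))
}
```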
@@ -14,6 +14,8 @@ use sha2::Digest;
use url::Url;
use zip::ZipArchive;

const BUNDLER_USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"),);

fn generate_github_mirror_url_from_template(github_url: &str) -> Option<String> {
std::env::var("TAURI_BUNDLER_TOOLS_GITHUB_MIRROR_TEMPLATE")
.ok()
@@ -47,7 +49,15 @@ fn generate_github_alternative_url(url: &str) -> Option<(ureq::Agent, String)> {

generate_github_mirror_url_from_template(url)
.or_else(|| generate_github_mirror_url_from_base(url))
.map(|alt_url| (ureq::agent(), alt_url))
.map(|alt_url| {
(
ureq::Agent::config_builder()
.user_agent(BUNDLER_USER_AGENT)
.build()
.into(),
alt_url,
)
})
}

fn create_agent_and_url(url: &str) -> (ureq::Agent, String) {
@@ -55,22 +65,21 @@ fn create_agent_and_url(url: &str) -> (ureq::Agent, String) {
}

pub(crate) fn base_ureq_agent() -> ureq::Agent {
#[allow(unused_mut)]
let mut config_builder = ureq::Agent::config_builder()
.user_agent(BUNDLER_USER_AGENT)
.proxy(ureq::Proxy::try_from_env());

#[cfg(feature = "platform-certs")]
let agent: ureq::Agent = ureq::Agent::config_builder()
.tls_config(
{
config_builder = config_builder.tls_config(
ureq::tls::TlsConfig::builder()
.root_certs(ureq::tls::RootCerts::PlatformVerifier)
.build(),
)
.proxy(ureq::Proxy::try_from_env())
.build()
.into();
#[cfg(not(feature = "platform-certs"))]
let agent: ureq::Agent = ureq::Agent::config_builder()
.proxy(ureq::Proxy::try_from_env())
.build()
.into();
agent
);
}

config_builder.build().into()
}

#[allow(dead_code)]

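A sketch of how the shared agent might be built and used, assuming ureq 3 and restricted to the calls already appearing in this diff (`config_builder`, `user_agent`, `proxy`, `head`, `call`, `get_uri`); the URL is hypothetical.

```rust
fn main() -> Result<(), Box<dyn std::error::Error>> {
  // Mirrors base_ureq_agent() without the optional platform-certs TLS config.
  let agent: ureq::Agent = ureq::Agent::config_builder()
    .user_agent(concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION")))
    .proxy(ureq::Proxy::try_from_env())
    .build()
    .into();

  // Hypothetical URL; a HEAD request is enough to resolve redirects, as in webview2_guid_path.
  let response = agent.head("https://example.com/some/tool.zip").call()?;
  println!("resolved to {}", response.get_uri());
  Ok(())
}
```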
@ -1,5 +1,108 @@
|
||||
# Changelog
|
||||
|
||||
## \[2.9.6]
|
||||
|
||||
### What's Changed
|
||||
|
||||
- [`7b1b3514d`](https://www.github.com/tauri-apps/tauri/commit/7b1b3514df771e6e9859b9f54fa4df332433948e) ([#14621](https://www.github.com/tauri-apps/tauri/pull/14621) by [@Legend-Master](https://www.github.com/tauri-apps/tauri/../../Legend-Master)) Errors like `Error Failed to parse version 2 for for NPM package tauri` when there was no `package-lock.json` file present yet or when using ones like `link:./tauri` are now only logged in `--verbose` mode.
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Upgraded to `tauri-macos-sign@2.3.2`
|
||||
- Upgraded to `tauri-bundler@2.7.5`
|
||||
|
||||
## \[2.9.5]
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- [`f022b2d1a`](https://www.github.com/tauri-apps/tauri/commit/f022b2d1ae57612e39c75782926f2f341d9034a8) ([#14582](https://www.github.com/tauri-apps/tauri/pull/14582) by [@hrzlgnm](https://www.github.com/tauri-apps/tauri/../../hrzlgnm)) Fixed an issue that caused the cli to error out with missing private key, in case the option `--no-sign` was requested and the `tauri.config` has signing key set and the plugin `tauri-plugin-updater` is used.
|
||||
- [`f855caf8a`](https://www.github.com/tauri-apps/tauri/commit/f855caf8a3830aa5dd6d0b039312866a5d9c3606) ([#14481](https://www.github.com/tauri-apps/tauri/pull/14481) by [@Legend-Master](https://www.github.com/tauri-apps/tauri/../../Legend-Master)) Fixed the mismatched tauri package versions check didn't work for pnpm
|
||||
- [`79a7d9ec0`](https://www.github.com/tauri-apps/tauri/commit/79a7d9ec01be1a371b8e923848140fea75e9caed) ([#14468](https://www.github.com/tauri-apps/tauri/pull/14468) by [@FabianLars](https://www.github.com/tauri-apps/tauri/../../FabianLars)) Fixed an issue that caused the cli to print errors like `Error Failed to parse version 2 for crate tauri` when there was no `Cargo.lock` file present yet. This will still be logged in `--verbose` mode.
|
||||
|
||||
### Performance Improvements
|
||||
|
||||
- [`ce98d87ce`](https://www.github.com/tauri-apps/tauri/commit/ce98d87ce0aaa907285852eb80691197424e03c3) ([#14474](https://www.github.com/tauri-apps/tauri/pull/14474) by [@Tunglies](https://www.github.com/tauri-apps/tauri/../../Tunglies)) refactor: remove needless collect. No user facing changes.
|
||||
- [`ee3cc4a91`](https://www.github.com/tauri-apps/tauri/commit/ee3cc4a91bf1315ecaefe90f423ffd55ef6c40db) ([#14475](https://www.github.com/tauri-apps/tauri/pull/14475) by [@Tunglies](https://www.github.com/tauri-apps/tauri/../../Tunglies)) perf: remove needless clones in various files for improved performance. No user facing changes.
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Upgraded to `tauri-bundler@2.7.4`
|
||||
- Upgraded to `tauri-macos-sign@2.3.1`
|
||||
- Upgraded to `tauri-utils@2.8.1`
|
||||
|
||||
## \[2.9.4]
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- [`b586ecf1f`](https://www.github.com/tauri-apps/tauri/commit/b586ecf1f4b3b087f9aa6c4668c2c18b1b7925f4) ([#14416](https://www.github.com/tauri-apps/tauri/pull/14416) by [@Legend-Master](https://www.github.com/tauri-apps/tauri/../../Legend-Master)) Premultiply Alpha before Resizing which gets rid of the gray fringe around the icons for svg images.
|
||||
|
||||
## \[2.9.3]
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [`22edc65aa`](https://www.github.com/tauri-apps/tauri/commit/22edc65aad0b3e45515008e8e0866112da70c8a1) ([#14408](https://www.github.com/tauri-apps/tauri/pull/14408) by [@FabianLars](https://www.github.com/tauri-apps/tauri/../../FabianLars)) Set user-agent in bundler and cli http requests when fetching build tools.
|
||||
- [`779612ac8`](https://www.github.com/tauri-apps/tauri/commit/779612ac8425a787626da4cefdb9eaf7d63bea18) ([#14379](https://www.github.com/tauri-apps/tauri/pull/14379) by [@moubctez](https://www.github.com/tauri-apps/tauri/../../moubctez)) Properly read the `required-features` field of binaries in Cargo.toml to prevent bundling issues when the features weren't enabled.
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- [`fd8c30b4f`](https://www.github.com/tauri-apps/tauri/commit/fd8c30b4f1bca8dd7165c5c0ebe7fbfd17662153) ([#14353](https://www.github.com/tauri-apps/tauri/pull/14353) by [@ChaseKnowlden](https://www.github.com/tauri-apps/tauri/../../ChaseKnowlden)) Premultiply Alpha before Resizing which gets rid of the gray fringe around the icons.
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Upgraded to `tauri-bundler@2.7.3`
|
||||
|
||||
## \[2.9.2]
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Upgraded to `tauri-bundler@2.7.2`
|
||||
|
||||
## \[2.9.1]
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Upgraded to `tauri-macos-sign@2.3.0`
|
||||
- Upgraded to `tauri-bundler@2.7.1`
|
||||
|
||||
## \[2.9.0]
|
||||
|
||||
### New Features
|
||||
|
||||
- [`f5851ee00`](https://www.github.com/tauri-apps/tauri/commit/f5851ee00d6d1f4d560a220ca5a728fedd525092) ([#14089](https://www.github.com/tauri-apps/tauri/pull/14089)) Adds the `scrollBarStyle` option to the window configuration.
|
||||
- [`2a06d1006`](https://www.github.com/tauri-apps/tauri/commit/2a06d10066a806e392efe8bfb16d943ee0b0b61d) ([#14052](https://www.github.com/tauri-apps/tauri/pull/14052)) Add a `--no-sign` flag to the `tauri build` and `tauri bundle` commands to skip the code signing step, improving the developer experience for local testing and development without requiring code signing keys.
|
||||
- [`3b4fac201`](https://www.github.com/tauri-apps/tauri/commit/3b4fac2017832d426dd07c5e24e26684eda57f7b) ([#14194](https://www.github.com/tauri-apps/tauri/pull/14194)) Add `tauri.conf.json > bundle > android > autoIncrementVersionCode` config option to automatically increment the Android version code.
|
||||
- [`673867aa0`](https://www.github.com/tauri-apps/tauri/commit/673867aa0e1ccd766ee879ffe96aba58c758613c) ([#14094](https://www.github.com/tauri-apps/tauri/pull/14094)) Try to detect ANDROID_HOME and NDK_HOME environment variables from default system locations and install them if needed using the Android Studio command line tools.
|
||||
- [`3d6868d09`](https://www.github.com/tauri-apps/tauri/commit/3d6868d09c323d68a152f3c3f8c7256311bd020a) ([#14128](https://www.github.com/tauri-apps/tauri/pull/14128)) Added support to defining the content type of the declared file association on macOS (maps to LSItemContentTypes property).
|
||||
- [`3d6868d09`](https://www.github.com/tauri-apps/tauri/commit/3d6868d09c323d68a152f3c3f8c7256311bd020a) ([#14128](https://www.github.com/tauri-apps/tauri/pull/14128)) Added support to defining the metadata for custom types declared in `tauri.conf.json > bundle > fileAssociations > exportedType` via the `UTExportedTypeDeclarations` Info.plist property.
|
||||
- [`ed7c9a410`](https://www.github.com/tauri-apps/tauri/commit/ed7c9a4100e08c002212265549d12130d021ad1e) ([#14108](https://www.github.com/tauri-apps/tauri/pull/14108)) Added `bundle > macOS > infoPlist` and `bundle > iOS > infoPlist` configurations to allow defining custom Info.plist extensions.
|
||||
- [`75082cc5b`](https://www.github.com/tauri-apps/tauri/commit/75082cc5b340e30e2c4b4cd4bd6a1fe5382164aa) ([#14120](https://www.github.com/tauri-apps/tauri/pull/14120)) Added `ios run` and `android run` commands to run the app in production mode.
|
||||
- [`cc8c0b531`](https://www.github.com/tauri-apps/tauri/commit/cc8c0b53171173dbd1d01781a50de1a3ea159031) ([#14031](https://www.github.com/tauri-apps/tauri/pull/14031)) Added support to universal app links on macOS with the `plugins > deep-link > desktop > domains` configuration.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [`94cbd40fc`](https://www.github.com/tauri-apps/tauri/commit/94cbd40fc733e08c0bccd48149d22a0e9c2f1e5c) ([#14223](https://www.github.com/tauri-apps/tauri/pull/14223)) Add support for Android's adaptive and themed icons.
|
||||
- [`b5aa01870`](https://www.github.com/tauri-apps/tauri/commit/b5aa018702bf45dc98297698f9b7d238705865a6) ([#14268](https://www.github.com/tauri-apps/tauri/pull/14268)) Update cargo-mobile2 to 0.21, enhancing error messages and opening Xcode when multiple apps are installed.
|
||||
- [`55453e845`](https://www.github.com/tauri-apps/tauri/commit/55453e8453d927b8197f1ba9f26fd944482938f7) ([#14262](https://www.github.com/tauri-apps/tauri/pull/14262)) Check mismatched versions in `tauri info`
|
||||
- [`1a6627ee7`](https://www.github.com/tauri-apps/tauri/commit/1a6627ee7d085a4e66784e2705254714d68c7244) ([#14122](https://www.github.com/tauri-apps/tauri/pull/14122)) Set a default log level filter when running `tauri add log`.
|
||||
- [`b06b3bd09`](https://www.github.com/tauri-apps/tauri/commit/b06b3bd091b0fed26cdcfb23cacb0462a7a9cc2d) ([#14126](https://www.github.com/tauri-apps/tauri/pull/14126)) Improve error messages with more context.
|
||||
- [`f6622a3e3`](https://www.github.com/tauri-apps/tauri/commit/f6622a3e342f5dd5fb3cf6e0f79fb309a10e9b3d) ([#14129](https://www.github.com/tauri-apps/tauri/pull/14129)) Prompt to install the iOS platform if it isn't installed yet.
|
||||
- [`6bbb530fd`](https://www.github.com/tauri-apps/tauri/commit/6bbb530fd5edfc07b180a4f3782b8566872ca3b1) ([#14105](https://www.github.com/tauri-apps/tauri/pull/14105)) Warn if productName is empty when initializing mobile project.
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- [`19fb6f7cb`](https://www.github.com/tauri-apps/tauri/commit/19fb6f7cb0d702cb2f25f6f2d1e11014d9dada5d) ([#14146](https://www.github.com/tauri-apps/tauri/pull/14146)) Strip Windows-only extensions from the binary path so an Android project initialized on Windows can be used on UNIX systems.
|
||||
- [`19fb6f7cb`](https://www.github.com/tauri-apps/tauri/commit/19fb6f7cb0d702cb2f25f6f2d1e11014d9dada5d) ([#14146](https://www.github.com/tauri-apps/tauri/pull/14146)) Enhance Android build script usage on Windows by attempting to run cmd, bat and exe formats.
|
||||
- [`28a2f9bc5`](https://www.github.com/tauri-apps/tauri/commit/28a2f9bc55f658eb71ef1a970ff9f791346f7682) ([#14101](https://www.github.com/tauri-apps/tauri/pull/14101)) Fix iOS CLI usage after modifying the package name.
|
||||
- [`d2938486e`](https://www.github.com/tauri-apps/tauri/commit/d2938486e9d974debd90c15d7160b8a17bf4d763) ([#14261](https://www.github.com/tauri-apps/tauri/pull/14261)) Replaced the non-standard nerd font character with ` ⱼₛ ` in `tauri info`
|
||||
- [`25e920e16`](https://www.github.com/tauri-apps/tauri/commit/25e920e169db900ca4f07c2bb9eb290e9f9f2c7d) ([#14298](https://www.github.com/tauri-apps/tauri/pull/14298)) Wait for dev server to exit before exiting the CLI when the app is closed on `tauri dev --no-watch`.
|
||||
- [`b0012424c`](https://www.github.com/tauri-apps/tauri/commit/b0012424c5f432debfa42ba145e2672966d5f6d5) ([#14115](https://www.github.com/tauri-apps/tauri/pull/14115)) Resolve local IP address when `tauri.conf.json > build > devUrl` host is `0.0.0.0`.
|
||||
- [`abf7e8850`](https://www.github.com/tauri-apps/tauri/commit/abf7e8850ba41e7173e9e9a3fdd6dfb8f357d72d) ([#14118](https://www.github.com/tauri-apps/tauri/pull/14118)) Fixes mobile project initialization when using `pnpx` or `pnpm dlx`.
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Upgraded to `tauri-utils@2.8.0`
|
||||
- Upgraded to `tauri-bundler@2.7.0`
|
||||
|
||||
## \[2.8.4]
|
||||
|
||||
### Enhancements
|
||||
|
||||
@@ -1,6 +1,6 @@
[package]
name = "tauri-cli"
version = "2.8.4"
version = "2.9.6"
authors = ["Tauri Programme within The Commons Conservancy"]
edition = "2021"
rust-version = "1.77.2"
@@ -36,7 +36,7 @@ name = "cargo-tauri"
path = "src/main.rs"

[target."cfg(any(target_os = \"linux\", target_os = \"dragonfly\", target_os = \"freebsd\", target_os = \"openbsd\", target_os = \"netbsd\", target_os = \"windows\", target_os = \"macos\"))".dependencies]
cargo-mobile2 = { version = "0.20.6", default-features = false }
cargo-mobile2 = { version = "0.21.1", default-features = false }

[dependencies]
jsonrpsee = { version = "0.24", features = ["server"] }
@@ -46,8 +46,8 @@ jsonrpsee-ws-client = { version = "0.24", default-features = false }
sublime_fuzzy = "0.7"
clap_complete = "4"
clap = { version = "4", features = ["derive", "env"] }
anyhow = "1"
tauri-bundler = { version = "2.6.1", default-features = false, path = "../tauri-bundler" }
thiserror = "2"
tauri-bundler = { version = "2.7.5", default-features = false, path = "../tauri-bundler" }
colored = "2"
serde = { version = "1", features = ["derive"] }
serde_json = { version = "1", features = ["preserve_order"] }
@@ -58,7 +58,7 @@ shared_child = "1"
duct = "1.0"
toml_edit = { version = "0.23", features = ["serde"] }
json-patch = "3"
tauri-utils = { version = "2.7.0", path = "../tauri-utils", features = [
tauri-utils = { version = "2.8.1", path = "../tauri-utils", features = [
"isolation",
"schema",
"config-json5",
@@ -69,7 +69,8 @@ toml = "0.9"
jsonschema = "0.33"
handlebars = "6"
include_dir = "0.7"
minisign = "=0.7.3"
dirs = "6"
minisign = "0.8"
base64 = "0.22"
ureq = { version = "3", default-features = false, features = ["gzip"] }
os_info = "3"
@@ -110,6 +111,9 @@ memchr = "2"
tempfile = "3"
uuid = { version = "1", features = ["v5"] }
rand = "0.9"
zip = { version = "4", default-features = false, features = ["deflate"] }
which = "8"
rayon = "1.10"

[dev-dependencies]
insta = "1"
@@ -129,7 +133,7 @@ libc = "0.2"

[target."cfg(target_os = \"macos\")".dependencies]
plist = "1"
tauri-macos-sign = { version = "2.2.0", path = "../tauri-macos-sign" }
tauri-macos-sign = { version = "2.3.2", path = "../tauri-macos-sign" }
object = { version = "0.36", default-features = false, features = [
"macho",
"read_core",

@@ -33,7 +33,7 @@ These environment variables are inputs to the CLI which may have an equivalent C
- See [creating API keys](https://developer.apple.com/documentation/appstoreconnectapi/creating_api_keys_for_app_store_connect_api) for more information.
- `API_PRIVATE_KEYS_DIR` — Specify the directory where your AuthKey file is located. See `APPLE_API_KEY`.
- `APPLE_API_ISSUER` — Issuer ID. Required if `APPLE_API_KEY` is specified.
- `APPLE_API_KEY_PATH` - path to the API key `.p8` file. If not specified, for macOS apps the bundler searches the following directories in sequence for a private key file with the name of 'AuthKey\_<api_key>.p8': './private_keys', '~/private_keys', '~/.private_keys', and '~/.appstoreconnect/private_keys'. **For iOS this variable is required**.
- `APPLE_API_KEY_PATH` - path to the API key `.p8` file. If not specified, for macOS apps the bundler searches the following directories in sequence for a private key file with the name of `AuthKey\_<api_key>.p8`: `./private_keys`, `~/private_keys`, `~/.private_keys`, and `~/.appstoreconnect/private_keys`. **For iOS this variable is required**.
- `APPLE_SIGNING_IDENTITY` — The identity used to code sign. Overwrites `tauri.conf.json > bundle > macOS > signingIdentity`. If neither are set, it is inferred from `APPLE_CERTIFICATE` when provided.
- `APPLE_PROVIDER_SHORT_NAME` — If your Apple ID is connected to multiple teams, you have to specify the provider short name of the team you want to use to notarize your app. Overwrites `tauri.conf.json > bundle > macOS > providerShortName`.
- `APPLE_DEVELOPMENT_TEAM` — The team ID used to code sign on iOS. Overwrites `tauri.conf.json > bundle > iOS > developmentTeam`. Can be found in https://developer.apple.com/account#MembershipDetailsCard.

@ -1,6 +1,6 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"$id": "https://schema.tauri.app/config/2.8.5",
|
||||
"$id": "https://schema.tauri.app/config/2.9.5",
|
||||
"title": "Config",
|
||||
"description": "The Tauri configuration object.\n It is read from a file where you can define your frontend assets,\n configure the bundler and define a tray icon.\n\n The configuration file is generated by the\n [`tauri init`](https://v2.tauri.app/reference/cli/#init) command that lives in\n your Tauri application source directory (src-tauri).\n\n Once generated, you may modify it at will to customize your Tauri application.\n\n ## File Formats\n\n By default, the configuration is defined as a JSON file named `tauri.conf.json`.\n\n Tauri also supports JSON5 and TOML files via the `config-json5` and `config-toml` Cargo features, respectively.\n The JSON5 file name must be either `tauri.conf.json` or `tauri.conf.json5`.\n The TOML file name is `Tauri.toml`.\n\n ## Platform-Specific Configuration\n\n In addition to the default configuration file, Tauri can\n read a platform-specific configuration from `tauri.linux.conf.json`,\n `tauri.windows.conf.json`, `tauri.macos.conf.json`, `tauri.android.conf.json` and `tauri.ios.conf.json`\n (or `Tauri.linux.toml`, `Tauri.windows.toml`, `Tauri.macos.toml`, `Tauri.android.toml` and `Tauri.ios.toml` if the `Tauri.toml` format is used),\n which gets merged with the main configuration object.\n\n ## Configuration Structure\n\n The configuration is composed of the following objects:\n\n - [`app`](#appconfig): The Tauri configuration\n - [`build`](#buildconfig): The build configuration\n - [`bundle`](#bundleconfig): The bundle configurations\n - [`plugins`](#pluginconfig): The plugins configuration\n\n Example tauri.config.json file:\n\n ```json\n {\n \"productName\": \"tauri-app\",\n \"version\": \"0.1.0\",\n \"build\": {\n \"beforeBuildCommand\": \"\",\n \"beforeDevCommand\": \"\",\n \"devUrl\": \"http://localhost:3000\",\n \"frontendDist\": \"../dist\"\n },\n \"app\": {\n \"security\": {\n \"csp\": null\n },\n \"windows\": [\n {\n \"fullscreen\": false,\n \"height\": 600,\n \"resizable\": true,\n \"title\": \"Tauri App\",\n \"width\": 800\n }\n ]\n },\n \"bundle\": {},\n \"plugins\": {}\n }\n ```",
|
||||
"type": "object",
|
||||
@ -84,6 +84,7 @@
|
||||
"default": {
|
||||
"active": false,
|
||||
"android": {
|
||||
"autoIncrementVersionCode": false,
|
||||
"minSdkVersion": 24
|
||||
},
|
||||
"createUpdaterArtifacts": false,
|
||||
@ -230,7 +231,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
"create": {
|
||||
"description": "Whether Tauri should create this window at app startup or not.\n\n When this is set to `false` you must manually grab the config object via `app.config().app.windows`\n and create it with [`WebviewWindowBuilder::from_config`](https://docs.rs/tauri/2/tauri/webview/struct.WebviewWindowBuilder.html#method.from_config).\n\n ## Example:\n\n ```rust\n tauri::Builder::default()\n .setup(|app| {\n tauri::WebviewWindowBuilder::from_config(app.handle(), app.config().app.windows[0])?.build()?;\n Ok(())\n });\n ```",
|
||||
"description": "Whether Tauri should create this window at app startup or not.\n\n When this is set to `false` you must manually grab the config object via `app.config().app.windows`\n and create it with [`WebviewWindowBuilder::from_config`](https://docs.rs/tauri/2/tauri/webview/struct.WebviewWindowBuilder.html#method.from_config).\n\n ## Example:\n\n ```rust\n tauri::Builder::default()\n .setup(|app| {\n tauri::WebviewWindowBuilder::from_config(app.handle(), &app.config().app.windows[0])?.build()?;\n Ok(())\n });\n ```",
|
||||
"default": true,
|
||||
"type": "boolean"
|
||||
},
|
||||
@ -920,7 +921,7 @@
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Mica effect that matches the system dark perefence **Windows 11 Only**",
|
||||
"description": "Mica effect that matches the system dark preference **Windows 11 Only**",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"mica"
|
||||
@ -941,7 +942,7 @@
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Tabbed effect that matches the system dark perefence **Windows 11 Only**",
|
||||
"description": "Tabbed effect that matches the system dark preference **Windows 11 Only**",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"tabbed"
|
||||
@ -1295,7 +1296,7 @@
|
||||
"additionalProperties": false
|
||||
},
|
||||
"FsScope": {
|
||||
"description": "Protocol scope definition.\n It is a list of glob patterns that restrict the API access from the webview.\n\n Each pattern can start with a variable that resolves to a system base directory.\n The variables are: `$AUDIO`, `$CACHE`, `$CONFIG`, `$DATA`, `$LOCALDATA`, `$DESKTOP`,\n `$DOCUMENT`, `$DOWNLOAD`, `$EXE`, `$FONT`, `$HOME`, `$PICTURE`, `$PUBLIC`, `$RUNTIME`,\n `$TEMPLATE`, `$VIDEO`, `$RESOURCE`, `$APP`, `$LOG`, `$TEMP`, `$APPCONFIG`, `$APPDATA`,\n `$APPLOCALDATA`, `$APPCACHE`, `$APPLOG`.",
|
||||
"description": "Protocol scope definition.\n It is a list of glob patterns that restrict the API access from the webview.\n\n Each pattern can start with a variable that resolves to a system base directory.\n The variables are: `$AUDIO`, `$CACHE`, `$CONFIG`, `$DATA`, `$LOCALDATA`, `$DESKTOP`,\n `$DOCUMENT`, `$DOWNLOAD`, `$EXE`, `$FONT`, `$HOME`, `$PICTURE`, `$PUBLIC`, `$RUNTIME`,\n `$TEMPLATE`, `$VIDEO`, `$RESOURCE`, `$TEMP`,\n `$APPCONFIG`, `$APPDATA`, `$APPLOCALDATA`, `$APPCACHE`, `$APPLOG`.",
|
||||
"anyOf": [
|
||||
{
|
||||
"description": "A list of paths that are allowed by this scope.",
|
||||
@ -2159,7 +2160,7 @@
|
||||
]
|
||||
},
|
||||
"fileAssociations": {
|
||||
"description": "File associations to application.",
|
||||
"description": "File types to associate with the application.",
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
@ -2282,6 +2283,7 @@
|
||||
"android": {
|
||||
"description": "Android configuration.",
|
||||
"default": {
|
||||
"autoIncrementVersionCode": false,
|
||||
"minSdkVersion": 24
|
||||
},
|
||||
"allOf": [
|
||||
@ -2433,6 +2435,16 @@
|
||||
"$ref": "#/definitions/AssociationExt"
|
||||
}
|
||||
},
|
||||
"contentTypes": {
|
||||
"description": "Declare support to a file with the given content type. Maps to `LSItemContentTypes` on macOS.\n\n This allows supporting any file format declared by another application that conforms to this type.\n Declaration of new types can be done with [`Self::exported_type`] and linking to certain content types are done via [`ExportedFileAssociation::conforms_to`].",
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"description": "The name. Maps to `CFBundleTypeName` on macOS. Default to `ext[0]`",
|
||||
"type": [
|
||||
@ -2471,6 +2483,17 @@
|
||||
"$ref": "#/definitions/HandlerRank"
|
||||
}
|
||||
]
|
||||
},
|
||||
"exportedType": {
|
||||
"description": "The exported type definition. Maps to a `UTExportedTypeDeclarations` entry on macOS.\n\n You should define this if the associated file is a custom file type defined by your application.",
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/ExportedFileAssociation"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
@ -2552,6 +2575,30 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"ExportedFileAssociation": {
|
||||
"description": "The exported type definition. Maps to a `UTExportedTypeDeclarations` entry on macOS.",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"identifier"
|
||||
],
|
||||
"properties": {
|
||||
"identifier": {
|
||||
"description": "The unique identifier for the exported type. Maps to `UTTypeIdentifier`.",
|
||||
"type": "string"
|
||||
},
|
||||
"conformsTo": {
|
||||
"description": "The types that this type conforms to. Maps to `UTTypeConformsTo`.\n\n Examples are `public.data`, `public.image`, `public.json` and `public.database`.",
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
},
|
||||
"WindowsConfig": {
|
||||
"description": "Windows bundler configuration.\n\n See more: <https://v2.tauri.app/reference/config/#windowsconfig>",
|
||||
"type": "object",
|
||||
@ -3572,6 +3619,13 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"infoPlist": {
|
||||
"description": "Path to a Info.plist file to merge with the default Info.plist.\n\n Note that Tauri also looks for a `Info.plist` file in the same directory as the Tauri configuration file.",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"dmg": {
|
||||
"description": "DMG-specific settings.",
|
||||
"default": {
|
||||
@ -3743,6 +3797,13 @@
|
||||
"description": "A version string indicating the minimum iOS version that the bundled application supports. Defaults to `13.0`.\n\n Maps to the IPHONEOS_DEPLOYMENT_TARGET value.",
|
||||
"default": "14.0",
|
||||
"type": "string"
|
||||
},
|
||||
"infoPlist": {
|
||||
"description": "Path to a Info.plist file to merge with the default Info.plist.\n\n Note that Tauri also looks for a `Info.plist` and `Info.ios.plist` file in the same directory as the Tauri configuration file.",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
@ -3767,6 +3828,11 @@
|
||||
"format": "uint32",
|
||||
"maximum": 2100000000.0,
|
||||
"minimum": 1.0
|
||||
},
|
||||
"autoIncrementVersionCode": {
|
||||
"description": "Whether to automatically increment the `versionCode` on each build.\n\n - If `true`, the generator will try to read the last `versionCode` from\n `tauri.properties` and increment it by 1 for every build.\n - If `false` or not set, it falls back to `version_code` or semver-derived logic.\n\n Note that to use this feature, you should remove `/tauri.properties` from `src-tauri/gen/android/app/.gitignore` so the current versionCode is committed to the repository.",
|
||||
"default": false,
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
|
||||
@@ -1,9 +1,9 @@
{
"cli.js": {
"version": "2.8.4",
"version": "2.9.6",
"node": ">= 10.0.0"
},
"tauri": "2.8.5",
"tauri-build": "2.4.1",
"tauri-plugin": "2.4.0"
"tauri": "2.9.5",
"tauri-build": "2.5.3",
"tauri-plugin": "2.5.2"
}

@@ -9,6 +9,7 @@ use tauri_utils::acl::capability::{Capability, PermissionEntry};

use crate::{
acl::FileFormat,
error::ErrorExt,
helpers::{app_paths::tauri_dir, prompts},
Result,
};
@@ -106,7 +107,9 @@ pub fn command(options: Options) -> Result<()> {
};

let path = match options.out {
Some(o) => o.canonicalize()?,
Some(o) => o
.canonicalize()
.fs_context("failed to canonicalize capability file path", o.clone())?,
None => {
let dir = tauri_dir();
let capabilities_dir = dir.join("capabilities");
@@ -125,17 +128,21 @@ pub fn command(options: Options) -> Result<()> {
);
let overwrite = prompts::confirm(&format!("{msg}, overwrite?"), Some(false))?;
if overwrite {
std::fs::remove_file(&path)?;
std::fs::remove_file(&path).fs_context("failed to remove capability file", path.clone())?;
} else {
anyhow::bail!(msg);
crate::error::bail!(msg);
}
}

if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)?;
std::fs::create_dir_all(parent).fs_context(
"failed to create capability directory",
parent.to_path_buf(),
)?;
}

std::fs::write(&path, options.format.serialize(&capability)?)?;
std::fs::write(&path, options.format.serialize(&capability)?)
.fs_context("failed to write capability file", path.clone())?;

log::info!(action = "Created"; "capability at {}", dunce::simplified(&path).display());


@@ -2,6 +2,7 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT

use crate::error::Context;
use serde::Serialize;
use std::fmt::Display;

@@ -33,8 +34,8 @@ impl FileFormat {

pub fn serialize<S: Serialize>(&self, s: &S) -> crate::Result<String> {
let contents = match self {
Self::Json => serde_json::to_string_pretty(s)?,
Self::Toml => toml_edit::ser::to_string_pretty(s)?,
Self::Json => serde_json::to_string_pretty(s).context("failed to serialize JSON")?,
Self::Toml => toml_edit::ser::to_string_pretty(s).context("failed to serialize TOML")?,
};
Ok(contents)
}

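For illustration, a hedged sketch of the two serializers `FileFormat::serialize` delegates to, assuming `serde`, `serde_json` and `toml_edit` (with its `serde` feature) as declared in this crate; the capability struct here is a stand-in, not the real `Capability` type.

```rust
use serde::Serialize;

// Stand-in for a capability value; field names are illustrative only.
#[derive(Serialize)]
struct Capability<'a> {
  identifier: &'a str,
  windows: Vec<&'a str>,
  permissions: Vec<&'a str>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
  let capability = Capability {
    identifier: "main-capability",
    windows: vec!["main"],
    permissions: vec!["core:default"],
  };
  // FileFormat::Json branch.
  println!("{}", serde_json::to_string_pretty(&capability)?);
  // FileFormat::Toml branch.
  println!("{}", toml_edit::ser::to_string_pretty(&capability)?);
  Ok(())
}
```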
@ -7,6 +7,7 @@ use std::path::Path;
|
||||
use clap::Parser;
|
||||
|
||||
use crate::{
|
||||
error::{Context, ErrorExt},
|
||||
helpers::{app_paths::resolve_tauri_dir, prompts},
|
||||
Result,
|
||||
};
|
||||
@ -100,7 +101,9 @@ impl TomlOrJson {
|
||||
fn to_string(&self) -> Result<String> {
|
||||
Ok(match self {
|
||||
TomlOrJson::Toml(t) => t.to_string(),
|
||||
TomlOrJson::Json(j) => serde_json::to_string_pretty(&j)?,
|
||||
TomlOrJson::Json(j) => {
|
||||
serde_json::to_string_pretty(&j).context("failed to serialize JSON")?
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -131,12 +134,12 @@ pub struct Options {
|
||||
pub fn command(options: Options) -> Result<()> {
|
||||
let dir = match resolve_tauri_dir() {
|
||||
Some(t) => t,
|
||||
None => std::env::current_dir()?,
|
||||
None => std::env::current_dir().context("failed to resolve current directory")?,
|
||||
};
|
||||
|
||||
let capabilities_dir = dir.join("capabilities");
|
||||
if !capabilities_dir.exists() {
|
||||
anyhow::bail!(
|
||||
crate::error::bail!(
|
||||
"Couldn't find capabilities directory at {}",
|
||||
dunce::simplified(&capabilities_dir).display()
|
||||
);
|
||||
@ -148,7 +151,11 @@ pub fn command(options: Options) -> Result<()> {
|
||||
.split_once(':')
|
||||
.and_then(|(plugin, _permission)| known_plugins.get(&plugin));
|
||||
|
||||
let capabilities_iter = std::fs::read_dir(&capabilities_dir)?
|
||||
let capabilities_iter = std::fs::read_dir(&capabilities_dir)
|
||||
.fs_context(
|
||||
"failed to read capabilities directory",
|
||||
capabilities_dir.clone(),
|
||||
)?
|
||||
.flatten()
|
||||
.filter(|e| e.file_type().map(|e| e.is_file()).unwrap_or_default())
|
||||
.filter_map(|e| {
|
||||
@ -240,7 +247,7 @@ pub fn command(options: Options) -> Result<()> {
|
||||
)?;
|
||||
|
||||
if selections.is_empty() {
|
||||
anyhow::bail!("You did not select any capabilities to update");
|
||||
crate::error::bail!("You did not select any capabilities to update");
|
||||
}
|
||||
|
||||
selections
|
||||
@ -252,7 +259,7 @@ pub fn command(options: Options) -> Result<()> {
|
||||
};
|
||||
|
||||
if capabilities.is_empty() {
|
||||
anyhow::bail!("Could not find a capability to update");
|
||||
crate::error::bail!("Could not find a capability to update");
|
||||
}
|
||||
|
||||
for (capability, path) in &mut capabilities {
|
||||
@ -265,7 +272,8 @@ pub fn command(options: Options) -> Result<()> {
|
||||
);
|
||||
} else {
|
||||
capability.insert_permission(options.identifier.clone());
|
||||
std::fs::write(&*path, capability.to_string()?)?;
|
||||
std::fs::write(&*path, capability.to_string()?)
|
||||
.fs_context("failed to write capability file", path.clone())?;
|
||||
log::info!(action = "Added"; "permission `{}` to `{}` at {}", options.identifier, capability.identifier(), dunce::simplified(path).display());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,7 +4,11 @@

use clap::Parser;

use crate::{helpers::app_paths::tauri_dir, Result};
use crate::{
error::{Context, ErrorExt},
helpers::app_paths::tauri_dir,
Result,
};
use colored::Colorize;
use tauri_utils::acl::{manifest::Manifest, APP_ACL_KEY};

@@ -29,8 +33,10 @@ pub fn command(options: Options) -> Result<()> {
.join("acl-manifests.json");

if acl_manifests_path.exists() {
let plugin_manifest_json = read_to_string(&acl_manifests_path)?;
let acl = serde_json::from_str::<BTreeMap<String, Manifest>>(&plugin_manifest_json)?;
let plugin_manifest_json = read_to_string(&acl_manifests_path)
.fs_context("failed to read plugin manifest", acl_manifests_path)?;
let acl = serde_json::from_str::<BTreeMap<String, Manifest>>(&plugin_manifest_json)
.context("failed to parse plugin manifest as JSON")?;

for (key, manifest) in acl {
if options
@@ -147,6 +153,6 @@ pub fn command(options: Options) -> Result<()> {

Ok(())
} else {
anyhow::bail!("permission file not found, please build your application once first")
crate::error::bail!("permission file not found, please build your application once first")
}
}

@ -8,6 +8,7 @@ use clap::Parser;
|
||||
|
||||
use crate::{
|
||||
acl::FileFormat,
|
||||
error::{Context, ErrorExt},
|
||||
helpers::{app_paths::resolve_tauri_dir, prompts},
|
||||
Result,
|
||||
};
|
||||
@ -67,11 +68,13 @@ pub fn command(options: Options) -> Result<()> {
|
||||
};
|
||||
|
||||
let path = match options.out {
|
||||
Some(o) => o.canonicalize()?,
|
||||
Some(o) => o
|
||||
.canonicalize()
|
||||
.fs_context("failed to canonicalize permission file path", o.clone())?,
|
||||
None => {
|
||||
let dir = match resolve_tauri_dir() {
|
||||
Some(t) => t,
|
||||
None => std::env::current_dir()?,
|
||||
None => std::env::current_dir().context("failed to resolve current directory")?,
|
||||
};
|
||||
let permissions_dir = dir.join("permissions");
|
||||
permissions_dir.join(format!(
|
||||
@ -89,24 +92,31 @@ pub fn command(options: Options) -> Result<()> {
|
||||
);
|
||||
let overwrite = prompts::confirm(&format!("{msg}, overwrite?"), Some(false))?;
|
||||
if overwrite {
|
||||
std::fs::remove_file(&path)?;
|
||||
std::fs::remove_file(&path).fs_context("failed to remove permission file", path.clone())?;
|
||||
} else {
|
||||
anyhow::bail!(msg);
|
||||
crate::error::bail!(msg);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(parent) = path.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
std::fs::create_dir_all(parent).fs_context(
|
||||
"failed to create permission directory",
|
||||
parent.to_path_buf(),
|
||||
)?;
|
||||
}
|
||||
|
||||
std::fs::write(
|
||||
&path,
|
||||
options.format.serialize(&PermissionFile {
|
||||
default: None,
|
||||
set: Vec::new(),
|
||||
permission: vec![permission],
|
||||
})?,
|
||||
)?;
|
||||
options
|
||||
.format
|
||||
.serialize(&PermissionFile {
|
||||
default: None,
|
||||
set: Vec::new(),
|
||||
permission: vec![permission],
|
||||
})
|
||||
.context("failed to serialize permission")?,
|
||||
)
|
||||
.fs_context("failed to write permission file", path.clone())?;
|
||||
|
||||
log::info!(action = "Created"; "permission at {}", dunce::simplified(&path).display());
|
||||
|
||||
|
||||
@ -7,11 +7,21 @@ use std::path::Path;
|
||||
use clap::Parser;
|
||||
use tauri_utils::acl::{manifest::PermissionFile, PERMISSION_SCHEMA_FILE_NAME};
|
||||
|
||||
use crate::{acl::FileFormat, helpers::app_paths::resolve_tauri_dir, Result};
|
||||
use crate::{
|
||||
acl::FileFormat,
|
||||
error::{Context, ErrorExt},
|
||||
helpers::app_paths::resolve_tauri_dir,
|
||||
Result,
|
||||
};
|
||||
|
||||
fn rm_permission_files(identifier: &str, dir: &Path) -> Result<()> {
|
||||
for entry in std::fs::read_dir(dir)?.flatten() {
|
||||
let file_type = entry.file_type()?;
|
||||
for entry in std::fs::read_dir(dir)
|
||||
.fs_context("failed to read permissions directory", dir.to_path_buf())?
|
||||
.flatten()
|
||||
{
|
||||
let file_type = entry
|
||||
.file_type()
|
||||
.fs_context("failed to get permission file type", entry.path())?;
|
||||
let path = entry.path();
|
||||
if file_type.is_dir() {
|
||||
rm_permission_files(identifier, &path)?;
|
||||
@ -27,12 +37,21 @@ fn rm_permission_files(identifier: &str, dir: &Path) -> Result<()> {
|
||||
let (mut permission_file, format): (PermissionFile, FileFormat) =
|
||||
match path.extension().and_then(|o| o.to_str()) {
|
||||
Some("toml") => {
|
||||
let content = std::fs::read_to_string(&path)?;
|
||||
(toml::from_str(&content)?, FileFormat::Toml)
|
||||
let content = std::fs::read_to_string(&path)
|
||||
.fs_context("failed to read permission file", path.clone())?;
|
||||
(
|
||||
toml::from_str(&content).context("failed to deserialize permission file")?,
|
||||
FileFormat::Toml,
|
||||
)
|
||||
}
|
||||
Some("json") => {
|
||||
let content = std::fs::read(&path)?;
|
||||
(serde_json::from_slice(&content)?, FileFormat::Json)
|
||||
let content =
|
||||
std::fs::read(&path).fs_context("failed to read permission file", path.clone())?;
|
||||
(
|
||||
serde_json::from_slice(&content)
|
||||
.context("failed to parse permission file as JSON")?,
|
||||
FileFormat::Json,
|
||||
)
|
||||
}
|
||||
_ => {
|
||||
continue;
|
||||
@ -63,10 +82,16 @@ fn rm_permission_files(identifier: &str, dir: &Path) -> Result<()> {
|
||||
&& permission_file.set.is_empty()
|
||||
&& permission_file.permission.is_empty()
|
||||
{
|
||||
std::fs::remove_file(&path)?;
|
||||
std::fs::remove_file(&path).fs_context("failed to remove permission file", path.clone())?;
|
||||
log::info!(action = "Removed"; "file {}", dunce::simplified(&path).display());
|
||||
} else if updated {
|
||||
std::fs::write(&path, format.serialize(&permission_file)?)?;
|
||||
std::fs::write(
|
||||
&path,
|
||||
format
|
||||
.serialize(&permission_file)
|
||||
.context("failed to serialize permission")?,
|
||||
)
|
||||
.fs_context("failed to write permission file", path.clone())?;
|
||||
log::info!(action = "Removed"; "permission {identifier} from {}", dunce::simplified(&path).display());
|
||||
}
|
||||
}
|
||||
@ -76,13 +101,19 @@ fn rm_permission_files(identifier: &str, dir: &Path) -> Result<()> {
|
||||
}
|
||||
|
||||
fn rm_permission_from_capabilities(identifier: &str, dir: &Path) -> Result<()> {
|
||||
for entry in std::fs::read_dir(dir)?.flatten() {
|
||||
let file_type = entry.file_type()?;
|
||||
for entry in std::fs::read_dir(dir)
|
||||
.fs_context("failed to read capabilities directory", dir.to_path_buf())?
|
||||
.flatten()
|
||||
{
|
||||
let file_type = entry
|
||||
.file_type()
|
||||
.fs_context("failed to get capability file type", entry.path())?;
|
||||
if file_type.is_file() {
|
||||
let path = entry.path();
|
||||
match path.extension().and_then(|o| o.to_str()) {
|
||||
Some("toml") => {
|
||||
let content = std::fs::read_to_string(&path)?;
|
||||
let content = std::fs::read_to_string(&path)
|
||||
.fs_context("failed to read capability file", path.clone())?;
|
||||
if let Ok(mut value) = content.parse::<toml_edit::DocumentMut>() {
|
||||
if let Some(permissions) = value.get_mut("permissions").and_then(|p| p.as_array_mut()) {
|
||||
let prev_len = permissions.len();
|
||||
@ -98,14 +129,16 @@ fn rm_permission_from_capabilities(identifier: &str, dir: &Path) -> Result<()> {
|
||||
_ => false,
|
||||
});
|
||||
if prev_len != permissions.len() {
|
||||
std::fs::write(&path, value.to_string())?;
|
||||
std::fs::write(&path, value.to_string())
|
||||
.fs_context("failed to write capability file", path.clone())?;
|
||||
log::info!(action = "Removed"; "permission from capability at {}", dunce::simplified(&path).display());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Some("json") => {
|
||||
let content = std::fs::read(&path)?;
|
||||
let content =
|
||||
std::fs::read(&path).fs_context("failed to read capability file", path.clone())?;
|
||||
if let Ok(mut value) = serde_json::from_slice::<serde_json::Value>(&content) {
|
||||
if let Some(permissions) = value.get_mut("permissions").and_then(|p| p.as_array_mut()) {
|
||||
let prev_len = permissions.len();
|
||||
@ -121,7 +154,12 @@ fn rm_permission_from_capabilities(identifier: &str, dir: &Path) -> Result<()> {
|
||||
_ => false,
|
||||
});
|
||||
if prev_len != permissions.len() {
|
||||
std::fs::write(&path, serde_json::to_vec_pretty(&value)?)?;
|
||||
std::fs::write(
|
||||
&path,
|
||||
serde_json::to_vec_pretty(&value)
|
||||
.context("failed to serialize capability JSON")?,
|
||||
)
|
||||
.fs_context("failed to write capability file", path.clone())?;
|
||||
log::info!(action = "Removed"; "permission from capability at {}", dunce::simplified(&path).display());
|
||||
}
|
||||
}
|
||||
@ -152,7 +190,9 @@ pub struct Options {
|
||||
}
|
||||
|
||||
pub fn command(options: Options) -> Result<()> {
|
||||
let permissions_dir = std::env::current_dir()?.join("permissions");
|
||||
let permissions_dir = std::env::current_dir()
|
||||
.context("failed to resolve current directory")?
|
||||
.join("permissions");
|
||||
if permissions_dir.exists() {
|
||||
rm_permission_files(&options.identifier, &permissions_dir)?;
|
||||
}
|
||||
|
||||
@ -8,6 +8,7 @@ use regex::Regex;
|
||||
|
||||
use crate::{
|
||||
acl,
|
||||
error::ErrorExt,
|
||||
helpers::{
|
||||
app_paths::{resolve_frontend_dir, tauri_dir},
|
||||
cargo,
|
||||
@ -64,7 +65,7 @@ pub fn run(options: Options) -> Result<()> {
|
||||
};
|
||||
|
||||
if !is_known && (options.tag.is_some() || options.rev.is_some() || options.branch.is_some()) {
|
||||
anyhow::bail!(
|
||||
crate::error::bail!(
|
||||
"Git options --tag, --rev and --branch can only be used with official Tauri plugins"
|
||||
);
|
||||
}
|
||||
@ -114,7 +115,7 @@ pub fn run(options: Options) -> Result<()> {
|
||||
format!("tauri-apps/tauri-plugin-{plugin}#{branch}")
|
||||
}
|
||||
(None, None, None, None) => npm_name,
|
||||
_ => anyhow::bail!("Only one of --tag, --rev and --branch can be specified"),
|
||||
_ => crate::error::bail!("Only one of --tag, --rev and --branch can be specified"),
|
||||
};
|
||||
manager.install(&[npm_spec], tauri_dir)?;
|
||||
}
|
||||
@ -141,9 +142,10 @@ pub fn run(options: Options) -> Result<()> {
|
||||
};
|
||||
let plugin_init = format!(".plugin(tauri_plugin_{plugin_snake_case}::{plugin_init_fn})");
|
||||
|
||||
let re = Regex::new(r"(tauri\s*::\s*Builder\s*::\s*default\(\))(\s*)")?;
|
||||
let re = Regex::new(r"(tauri\s*::\s*Builder\s*::\s*default\(\))(\s*)").unwrap();
|
||||
for file in [tauri_dir.join("src/main.rs"), tauri_dir.join("src/lib.rs")] {
|
||||
let contents = std::fs::read_to_string(&file)?;
|
||||
let contents =
|
||||
std::fs::read_to_string(&file).fs_context("failed to read Rust entry point", file.clone())?;
|
||||
|
||||
if contents.contains(&plugin_init) {
|
||||
log::info!(
|
||||
@ -157,7 +159,7 @@ pub fn run(options: Options) -> Result<()> {
|
||||
let out = re.replace(&contents, format!("$1$2{plugin_init}$2"));
|
||||
|
||||
log::info!("Adding plugin to {}", file.display());
|
||||
std::fs::write(file, out.as_bytes())?;
|
||||
std::fs::write(&file, out.as_bytes()).fs_context("failed to write plugin init code", file)?;
|
||||
|
||||
if !options.no_fmt {
|
||||
// reformat code with rustfmt
|
||||
|
||||
@@ -4,16 +4,16 @@

use crate::{
bundle::BundleFormat,
error::{Context, ErrorExt},
helpers::{
self,
app_paths::{frontend_dir, tauri_dir},
config::{get as get_config, ConfigHandle, FrontendDist},
config::{get as get_config, ConfigMetadata, FrontendDist},
},
info::plugins::check_mismatched_packages,
interface::{rust::get_cargo_target_dir, AppInterface, Interface},
ConfigValue, Result,
};
use anyhow::Context;
use clap::{ArgAction, Parser};
use std::env::set_current_dir;
use tauri_utils::config::RunnerConfig;
@@ -106,11 +106,11 @@ pub fn command(mut options: Options, verbosity: u8) -> Result<()> {
options.target.clone(),
)?;

setup(&interface, &mut options, config.clone(), false)?;

let config_guard = config.lock().unwrap();
let config_ = config_guard.as_ref().unwrap();

setup(&interface, &mut options, config_, false)?;

if let Some(minimum_system_version) = &config_.bundle.macos.minimum_system_version {
std::env::set_var("MACOSX_DEPLOYMENT_TARGET", minimum_system_version);
}
@@ -132,7 +132,7 @@ pub fn command(mut options: Options, verbosity: u8) -> Result<()> {
verbosity,
ci,
&interface,
&app_settings,
&*app_settings,
config_,
&out_dir,
)?;
@@ -144,7 +144,7 @@ pub fn command(mut options: Options, verbosity: u8) -> Result<()> {
pub fn setup(
interface: &AppInterface,
options: &mut Options,
config: ConfigHandle,
config: &ConfigMetadata,
mobile: bool,
) -> Result<()> {
let tauri_path = tauri_dir();
@@ -160,61 +160,61 @@ pub fn setup(
}
}

set_current_dir(tauri_path).with_context(|| "failed to change current working directory")?;
set_current_dir(tauri_path).context("failed to set current directory")?;

let config_guard = config.lock().unwrap();
let config_ = config_guard.as_ref().unwrap();

let bundle_identifier_source = config_
let bundle_identifier_source = config
.find_bundle_identifier_overwriter()
.unwrap_or_else(|| "tauri.conf.json".into());

if config_.identifier == "com.tauri.dev" {
anyhow::bail!(
if config.identifier == "com.tauri.dev" {
crate::error::bail!(
"You must change the bundle identifier in `{bundle_identifier_source} identifier`. The default value `com.tauri.dev` is not allowed as it must be unique across applications.",
);
}

if config_
if config
.identifier
.chars()
.any(|ch| !(ch.is_alphanumeric() || ch == '-' || ch == '.'))
{
anyhow::bail!(
"The bundle identifier \"{}\" set in `{} identifier`. The bundle identifier string must contain only alphanumeric characters (A-Z, a-z, and 0-9), hyphens (-), and periods (.).",
config_.identifier,
bundle_identifier_source
crate::error::bail!(
"The bundle identifier \"{}\" set in `{bundle_identifier_source:?} identifier`. The bundle identifier string must contain only alphanumeric characters (A-Z, a-z, and 0-9), hyphens (-), and periods (.).",
config.identifier,
);
}

if config_.identifier.ends_with(".app") {
if config.identifier.ends_with(".app") {
log::warn!(
"The bundle identifier \"{}\" set in `{} identifier` ends with `.app`. This is not recommended because it conflicts with the application bundle extension on macOS.",
config_.identifier,
bundle_identifier_source
"The bundle identifier \"{}\" set in `{bundle_identifier_source:?} identifier` ends with `.app`. This is not recommended because it conflicts with the application bundle extension on macOS.",
config.identifier,
);
}

if let Some(before_build) = config_.build.before_build_command.clone() {
if let Some(before_build) = config.build.before_build_command.clone() {
helpers::run_hook("beforeBuildCommand", before_build, interface, options.debug)?;
}

if let Some(FrontendDist::Directory(web_asset_path)) = &config_.build.frontend_dist {
if let Some(FrontendDist::Directory(web_asset_path)) = &config.build.frontend_dist {
if !web_asset_path.exists() {
let absolute_path = web_asset_path
.parent()
.and_then(|p| p.canonicalize().ok())
.map(|p| p.join(web_asset_path.file_name().unwrap()))
.unwrap_or_else(|| std::env::current_dir().unwrap().join(web_asset_path));
return Err(anyhow::anyhow!(
"Unable to find your web assets, did you forget to build your web app? Your frontendDist is set to \"{}\" (which is `{}`).",
web_asset_path.display(), absolute_path.display(),
));
crate::error::bail!(
"Unable to find your web assets, did you forget to build your web app? Your frontendDist is set to \"{}\" (which is `{}`).",
web_asset_path.display(), absolute_path.display(),
);
}
if web_asset_path.canonicalize()?.file_name() == Some(std::ffi::OsStr::new("src-tauri")) {
return Err(anyhow::anyhow!(
if web_asset_path
.canonicalize()
.fs_context("failed to canonicalize path", web_asset_path.to_path_buf())?
.file_name()
== Some(std::ffi::OsStr::new("src-tauri"))
{
crate::error::bail!(
"The configured frontendDist is the `src-tauri` folder. Please isolate your web assets on a separate folder and update `tauri.conf.json > build > frontendDist`.",
));
);
}

// Issue #13287 - Allow the use of target dir inside frontendDist/distDir
@@ -238,22 +238,22 @@ pub fn setup(
}

if !out_folders.is_empty() {
return Err(anyhow::anyhow!(
crate::error::bail!(
"The configured frontendDist includes the `{:?}` {}. Please isolate your web assets on a separate folder and update `tauri.conf.json > build > frontendDist`.",
out_folders,
if out_folders.len() == 1 { "folder" } else { "folders" }
));
);
}
}

if options.runner.is_none() {
options.runner = config_.build.runner.clone();
options.runner = config.build.runner.clone();
}

options
.features
.get_or_insert(Vec::new())
.extend(config_.build.features.clone().unwrap_or_default());
.extend(config.build.features.clone().unwrap_or_default());
interface.build_options(&mut options.args, &mut options.features, mobile);

Ok(())

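A standalone sketch of the bundle identifier rule enforced in `setup` above (alphanumerics, hyphens and periods only, and not the default `com.tauri.dev`); the helper name and assertions are illustrative.

```rust
// Same character rule as the setup() check above.
fn is_valid_bundle_identifier(identifier: &str) -> bool {
  identifier != "com.tauri.dev"
    && !identifier
      .chars()
      .any(|ch| !(ch.is_alphanumeric() || ch == '-' || ch == '.'))
}

fn main() {
  assert!(is_valid_bundle_identifier("com.example.my-app"));
  assert!(!is_valid_bundle_identifier("com.tauri.dev")); // the default identifier is rejected
  assert!(!is_valid_bundle_identifier("com.example.my_app")); // underscores are not allowed
}
```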
@ -8,12 +8,12 @@ use std::{
|
||||
sync::OnceLock,
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use clap::{builder::PossibleValue, ArgAction, Parser, ValueEnum};
|
||||
use tauri_bundler::PackageType;
|
||||
use tauri_utils::platform::Target;
|
||||
|
||||
use crate::{
|
||||
error::{Context, ErrorExt},
|
||||
helpers::{
|
||||
self,
|
||||
app_paths::tauri_dir,
|
||||
@ -28,11 +28,11 @@ use crate::{
|
||||
pub struct BundleFormat(PackageType);
|
||||
|
||||
impl FromStr for BundleFormat {
|
||||
type Err = anyhow::Error;
|
||||
type Err = crate::Error;
|
||||
fn from_str(s: &str) -> crate::Result<Self> {
|
||||
PackageType::from_short_name(s)
|
||||
.map(Self)
|
||||
.ok_or_else(|| anyhow::anyhow!("unknown bundle format {s}"))
|
||||
.with_context(|| format!("unknown bundle format {s}"))
|
||||
}
|
||||
}
|
||||
|
||||
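For reference, the new `Err` type threads straight through callers that parse user input. A minimal sketch, assuming a hypothetical helper in the same module that collects several bundle names (the function name and signature are illustrative, not part of the CLI):

use std::str::FromStr;

// Collect user-supplied bundle names; any unknown name surfaces as
// crate::Error::GenericError("unknown bundle format ...") via the
// Context impl for Option shown in the error module.
fn parse_bundle_formats(raw: &[String]) -> crate::Result<Vec<BundleFormat>> {
  raw.iter().map(|s| BundleFormat::from_str(s)).collect()
}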
@ -139,8 +139,7 @@ pub fn command(options: Options, verbosity: u8) -> crate::Result<()> {
|
||||
)?;
|
||||
|
||||
let tauri_path = tauri_dir();
|
||||
std::env::set_current_dir(tauri_path)
|
||||
.with_context(|| "failed to change current working directory")?;
|
||||
std::env::set_current_dir(tauri_path).context("failed to set current directory")?;
|
||||
|
||||
let config_guard = config.lock().unwrap();
|
||||
let config_ = config_guard.as_ref().unwrap();
|
||||
@ -159,7 +158,7 @@ pub fn command(options: Options, verbosity: u8) -> crate::Result<()> {
|
||||
verbosity,
|
||||
ci,
|
||||
&interface,
|
||||
&app_settings,
|
||||
&*app_settings,
|
||||
config_,
|
||||
&out_dir,
|
||||
)
|
||||
@ -171,7 +170,7 @@ pub fn bundle<A: AppSettings>(
|
||||
verbosity: u8,
|
||||
ci: bool,
|
||||
interface: &AppInterface,
|
||||
app_settings: &std::sync::Arc<A>,
|
||||
app_settings: &A,
|
||||
config: &ConfigMetadata,
|
||||
out_dir: &Path,
|
||||
) -> crate::Result<()> {
|
||||
@ -214,12 +213,7 @@ pub fn bundle<A: AppSettings>(
|
||||
_ => log::Level::Trace,
|
||||
});
|
||||
|
||||
let bundles = tauri_bundler::bundle_project(&settings)
|
||||
.map_err(|e| match e {
|
||||
tauri_bundler::Error::BundlerError(e) => e,
|
||||
e => anyhow::anyhow!("{e:#}"),
|
||||
})
|
||||
.with_context(|| "failed to bundle project")?;
|
||||
let bundles = tauri_bundler::bundle_project(&settings).map_err(Box::new)?;
|
||||
|
||||
sign_updaters(settings, bundles, ci)?;
|
||||
|
||||
@ -255,12 +249,18 @@ fn sign_updaters(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if settings.no_sign() {
|
||||
log::warn!("Updater signing is skipped due to --no-sign flag.");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// get the public key
|
||||
let pubkey = &update_settings.pubkey;
|
||||
// check if pubkey points to a file...
|
||||
let maybe_path = Path::new(pubkey);
|
||||
let pubkey = if maybe_path.exists() {
|
||||
std::fs::read_to_string(maybe_path)?
|
||||
std::fs::read_to_string(maybe_path)
|
||||
.fs_context("failed to read pubkey from file", maybe_path.to_path_buf())?
|
||||
} else {
|
||||
pubkey.to_string()
|
||||
};
|
||||
@ -272,12 +272,15 @@ fn sign_updaters(
|
||||
|
||||
// get the private key
|
||||
let private_key = std::env::var("TAURI_SIGNING_PRIVATE_KEY")
|
||||
.map_err(|_| anyhow::anyhow!("A public key has been found, but no private key. Make sure to set `TAURI_SIGNING_PRIVATE_KEY` environment variable."))?;
|
||||
.ok()
|
||||
.context("A public key has been found, but no private key. Make sure to set `TAURI_SIGNING_PRIVATE_KEY` environment variable.")?;
|
||||
// check if private_key points to a file...
|
||||
let maybe_path = Path::new(&private_key);
|
||||
let private_key = if maybe_path.exists() {
|
||||
std::fs::read_to_string(maybe_path)
|
||||
.with_context(|| format!("faild to read {}", maybe_path.display()))?
|
||||
std::fs::read_to_string(maybe_path).fs_context(
|
||||
"failed to read private key from file",
|
||||
maybe_path.to_path_buf(),
|
||||
)?
|
||||
} else {
|
||||
private_key
|
||||
};
|
||||
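Both keys above follow the same convention: the configured value is either a path to a key file or the key material itself. A minimal sketch of that resolution, assuming it were factored into a standalone helper (the function name is illustrative, not part of the CLI):

use crate::error::ErrorExt;

// If the value names an existing file, read the key from it; otherwise the
// value already is the key material.
fn resolve_key_material(value: &str) -> crate::Result<String> {
  let maybe_path = std::path::Path::new(value);
  if maybe_path.exists() {
    std::fs::read_to_string(maybe_path)
      .fs_context("failed to read key from file", maybe_path.to_path_buf())
  } else {
    Ok(value.to_string())
  }
}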
@ -315,11 +318,11 @@ fn print_signed_updater_archive(output_paths: &[PathBuf]) -> crate::Result<()> {
|
||||
};
|
||||
let mut printable_paths = String::new();
|
||||
for path in output_paths {
|
||||
writeln!(
|
||||
let _ = writeln!(
|
||||
printable_paths,
|
||||
" {}",
|
||||
tauri_utils::display_path(path)
|
||||
)?;
|
||||
);
|
||||
}
|
||||
log::info!( action = "Finished"; "{finished_bundles} {pluralised} at:\n{printable_paths}");
|
||||
}
|
||||
|
||||
@ -2,8 +2,7 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use crate::Result;
|
||||
use anyhow::Context;
|
||||
use crate::{error::ErrorExt, Result};
|
||||
use clap::{Command, Parser};
|
||||
use clap_complete::{generate, Shell};
|
||||
|
||||
@ -95,7 +94,7 @@ pub fn command(options: Options, cmd: Command) -> Result<()> {
|
||||
|
||||
let completions = get_completions(options.shell, cmd)?;
|
||||
if let Some(output) = options.output {
|
||||
write(output, completions).context("failed to write to output path")?;
|
||||
write(&output, completions).fs_context("failed to write to completions", output)?;
|
||||
} else {
|
||||
print!("{completions}");
|
||||
}
|
||||
|
||||
@ -3,6 +3,7 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use crate::{
|
||||
error::{Context, ErrorExt},
|
||||
helpers::{
|
||||
app_paths::{frontend_dir, tauri_dir},
|
||||
command_env,
|
||||
@ -12,10 +13,9 @@ use crate::{
|
||||
},
|
||||
info::plugins::check_mismatched_packages,
|
||||
interface::{AppInterface, ExitReason, Interface},
|
||||
CommandExt, ConfigValue, Result,
|
||||
CommandExt, ConfigValue, Error, Result,
|
||||
};
|
||||
|
||||
use anyhow::{bail, Context};
|
||||
use clap::{ArgAction, Parser};
|
||||
use shared_child::SharedChild;
|
||||
use tauri_utils::{config::RunnerConfig, platform::Target};
|
||||
@ -34,7 +34,7 @@ use std::{
|
||||
mod builtin_dev_server;
|
||||
|
||||
static BEFORE_DEV: OnceLock<Mutex<Arc<SharedChild>>> = OnceLock::new();
|
||||
static KILL_BEFORE_DEV_FLAG: OnceLock<AtomicBool> = OnceLock::new();
|
||||
static KILL_BEFORE_DEV_FLAG: AtomicBool = AtomicBool::new(false);
|
||||
|
||||
#[cfg(unix)]
|
||||
const KILL_CHILDREN_SCRIPT: &[u8] = include_bytes!("../scripts/kill-children.sh");
|
||||
@ -143,7 +143,7 @@ pub fn setup(interface: &AppInterface, options: &mut Options, config: ConfigHand
|
||||
}
|
||||
});
|
||||
|
||||
set_current_dir(tauri_path).with_context(|| "failed to change current working directory")?;
|
||||
set_current_dir(tauri_path).context("failed to set current directory")?;
|
||||
|
||||
if let Some(before_dev) = config
|
||||
.lock()
|
||||
@ -190,15 +190,15 @@ pub fn setup(interface: &AppInterface, options: &mut Options, config: ConfigHand
|
||||
};
|
||||
|
||||
if wait {
|
||||
let status = command.piped().with_context(|| {
|
||||
format!(
|
||||
"failed to run `{}` with `{}`",
|
||||
before_dev,
|
||||
let status = command.piped().map_err(|error| Error::CommandFailed {
|
||||
command: format!(
|
||||
"`{before_dev}` with `{}`",
|
||||
if cfg!(windows) { "cmd /S /C" } else { "sh -c" }
|
||||
)
|
||||
),
|
||||
error,
|
||||
})?;
|
||||
if !status.success() {
|
||||
bail!(
|
||||
crate::error::bail!(
|
||||
"beforeDevCommand `{}` failed with exit code {}",
|
||||
before_dev,
|
||||
status.code().unwrap_or_default()
|
||||
@ -206,8 +206,8 @@ pub fn setup(interface: &AppInterface, options: &mut Options, config: ConfigHand
|
||||
}
|
||||
} else {
|
||||
command.stdin(Stdio::piped());
|
||||
command.stdout(os_pipe::dup_stdout()?);
|
||||
command.stderr(os_pipe::dup_stderr()?);
|
||||
command.stdout(os_pipe::dup_stdout().unwrap());
|
||||
command.stderr(os_pipe::dup_stderr().unwrap());
|
||||
|
||||
let child = SharedChild::spawn(&mut command)
|
||||
.unwrap_or_else(|_| panic!("failed to run `{before_dev}`"));
|
||||
@ -218,14 +218,13 @@ pub fn setup(interface: &AppInterface, options: &mut Options, config: ConfigHand
|
||||
let status = child_
|
||||
.wait()
|
||||
.expect("failed to wait on \"beforeDevCommand\"");
|
||||
if !(status.success() || KILL_BEFORE_DEV_FLAG.get().unwrap().load(Ordering::Relaxed)) {
|
||||
if !(status.success() || KILL_BEFORE_DEV_FLAG.load(Ordering::Relaxed)) {
|
||||
log::error!("The \"beforeDevCommand\" terminated with a non-zero status code.");
|
||||
exit(status.code().unwrap_or(1));
|
||||
}
|
||||
});
|
||||
|
||||
BEFORE_DEV.set(Mutex::new(child)).unwrap();
|
||||
KILL_BEFORE_DEV_FLAG.set(AtomicBool::default()).unwrap();
|
||||
|
||||
let _ = ctrlc::set_handler(move || {
|
||||
kill_before_dev_process();
|
||||
@ -278,13 +277,16 @@ pub fn setup(interface: &AppInterface, options: &mut Options, config: ConfigHand
|
||||
if !options.no_dev_server && dev_url.is_none() {
|
||||
if let Some(FrontendDist::Directory(path)) = &frontend_dist {
|
||||
if path.exists() {
|
||||
let path = path.canonicalize()?;
|
||||
let path = path
|
||||
.canonicalize()
|
||||
.fs_context("failed to canonicalize path", path.to_path_buf())?;
|
||||
|
||||
let ip = options
|
||||
.host
|
||||
.unwrap_or_else(|| Ipv4Addr::new(127, 0, 0, 1).into());
|
||||
|
||||
let server_url = builtin_dev_server::start(path, ip, options.port)?;
|
||||
let server_url = builtin_dev_server::start(path, ip, options.port)
|
||||
.context("failed to start builtin dev server")?;
|
||||
let server_url = format!("http://{server_url}");
|
||||
dev_url = Some(server_url.parse().unwrap());
|
||||
|
||||
@ -301,18 +303,16 @@ pub fn setup(interface: &AppInterface, options: &mut Options, config: ConfigHand
|
||||
|
||||
if !options.no_dev_server_wait {
|
||||
if let Some(url) = dev_url {
|
||||
let host = url
|
||||
.host()
|
||||
.unwrap_or_else(|| panic!("No host name in the URL"));
|
||||
let host = url.host().expect("No host name in the URL");
|
||||
let port = url
|
||||
.port_or_known_default()
|
||||
.unwrap_or_else(|| panic!("No port number in the URL"));
|
||||
.expect("No port number in the URL");
|
||||
let addrs;
|
||||
let addr;
|
||||
let addrs = match host {
|
||||
url::Host::Domain(domain) => {
|
||||
use std::net::ToSocketAddrs;
|
||||
addrs = (domain, port).to_socket_addrs()?;
|
||||
addrs = (domain, port).to_socket_addrs().unwrap();
|
||||
addrs.as_slice()
|
||||
}
|
||||
url::Host::Ipv4(ip) => {
|
||||
@ -377,11 +377,10 @@ pub fn on_app_exit(code: Option<i32>, reason: ExitReason, exit_on_panic: bool, n
|
||||
pub fn kill_before_dev_process() {
|
||||
if let Some(child) = BEFORE_DEV.get() {
|
||||
let child = child.lock().unwrap();
|
||||
let kill_before_dev_flag = KILL_BEFORE_DEV_FLAG.get().unwrap();
|
||||
if kill_before_dev_flag.load(Ordering::Relaxed) {
|
||||
if KILL_BEFORE_DEV_FLAG.load(Ordering::Relaxed) {
|
||||
return;
|
||||
}
|
||||
kill_before_dev_flag.store(true, Ordering::Relaxed);
|
||||
KILL_BEFORE_DEV_FLAG.store(true, Ordering::Relaxed);
|
||||
#[cfg(windows)]
|
||||
{
|
||||
let powershell_path = std::env::var("SYSTEMROOT").map_or_else(
|
||||
|
||||
@ -18,6 +18,8 @@ use std::{
|
||||
use tauri_utils::mime_type::MimeType;
|
||||
use tokio::sync::broadcast::{channel, Sender};
|
||||
|
||||
use crate::error::ErrorExt;
|
||||
|
||||
const RELOAD_SCRIPT: &str = include_str!("./auto-reload.js");
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -29,7 +31,8 @@ struct ServerState {
|
||||
|
||||
pub fn start<P: AsRef<Path>>(dir: P, ip: IpAddr, port: Option<u16>) -> crate::Result<SocketAddr> {
|
||||
let dir = dir.as_ref();
|
||||
let dir = dunce::canonicalize(dir)?;
|
||||
let dir =
|
||||
dunce::canonicalize(dir).fs_context("failed to canonicalize path", dir.to_path_buf())?;
|
||||
|
||||
// bind port and tcp listener
|
||||
let auto_port = port.is_none();
|
||||
@ -37,12 +40,12 @@ pub fn start<P: AsRef<Path>>(dir: P, ip: IpAddr, port: Option<u16>) -> crate::Re
|
||||
let (tcp_listener, address) = loop {
|
||||
let address = SocketAddr::new(ip, port);
|
||||
if let Ok(tcp) = std::net::TcpListener::bind(address) {
|
||||
tcp.set_nonblocking(true)?;
|
||||
tcp.set_nonblocking(true).unwrap();
|
||||
break (tcp, address);
|
||||
}
|
||||
|
||||
if !auto_port {
|
||||
anyhow::bail!("Couldn't bind to {port} on {ip}");
|
||||
crate::error::bail!("Couldn't bind to {port} on {ip}");
|
||||
}
|
||||
|
||||
port += 1;
|
||||
@ -152,11 +155,11 @@ fn inject_address(html_bytes: Vec<u8>, address: &SocketAddr) -> Vec<u8> {
|
||||
}
|
||||
|
||||
fn fs_read_scoped(path: PathBuf, scope: &Path) -> crate::Result<Vec<u8>> {
|
||||
let path = dunce::canonicalize(path)?;
|
||||
let path = dunce::canonicalize(&path).fs_context("failed to canonicalize path", path)?;
|
||||
if path.starts_with(scope) {
|
||||
std::fs::read(path).map_err(Into::into)
|
||||
std::fs::read(&path).fs_context("failed to read file", &path)
|
||||
} else {
|
||||
anyhow::bail!("forbidden path")
|
||||
crate::error::bail!("forbidden path")
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
105
crates/tauri-cli/src/error.rs
Normal file
@ -0,0 +1,105 @@
// Copyright 2019-2024 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT

use std::{fmt::Display, path::PathBuf};

#[derive(Debug, thiserror::Error)]
pub enum Error {
  #[error("{0}: {1}")]
  Context(String, Box<dyn std::error::Error + Send + Sync + 'static>),
  #[error("{0}")]
  GenericError(String),
  #[error("failed to bundle project {0}")]
  Bundler(#[from] Box<tauri_bundler::Error>),
  #[error("{context} {path}: {error}")]
  Fs {
    context: &'static str,
    path: PathBuf,
    error: std::io::Error,
  },
  #[error("failed to run command {command}: {error}")]
  CommandFailed {
    command: String,
    error: std::io::Error,
  },
  #[cfg(target_os = "macos")]
  #[error(transparent)]
  MacosSign(#[from] Box<tauri_macos_sign::Error>),
}

/// Convenient type alias of Result type.
pub type Result<T> = std::result::Result<T, Error>;

pub trait Context<T> {
  // Required methods
  fn context<C>(self, context: C) -> Result<T>
  where
    C: Display + Send + Sync + 'static;
  fn with_context<C, F>(self, f: F) -> Result<T>
  where
    C: Display + Send + Sync + 'static,
    F: FnOnce() -> C;
}

impl<T, E: std::error::Error + Send + Sync + 'static> Context<T> for std::result::Result<T, E> {
  fn context<C>(self, context: C) -> Result<T>
  where
    C: Display + Send + Sync + 'static,
  {
    self.map_err(|e| Error::Context(context.to_string(), Box::new(e)))
  }

  fn with_context<C, F>(self, f: F) -> Result<T>
  where
    C: Display + Send + Sync + 'static,
    F: FnOnce() -> C,
  {
    self.map_err(|e| Error::Context(f().to_string(), Box::new(e)))
  }
}

impl<T> Context<T> for Option<T> {
  fn context<C>(self, context: C) -> Result<T>
  where
    C: Display + Send + Sync + 'static,
  {
    self.ok_or_else(|| Error::GenericError(context.to_string()))
  }

  fn with_context<C, F>(self, f: F) -> Result<T>
  where
    C: Display + Send + Sync + 'static,
    F: FnOnce() -> C,
  {
    self.ok_or_else(|| Error::GenericError(f().to_string()))
  }
}

pub trait ErrorExt<T> {
  fn fs_context(self, context: &'static str, path: impl Into<PathBuf>) -> Result<T>;
}

impl<T> ErrorExt<T> for std::result::Result<T, std::io::Error> {
  fn fs_context(self, context: &'static str, path: impl Into<PathBuf>) -> Result<T> {
    self.map_err(|error| Error::Fs {
      context,
      path: path.into(),
      error,
    })
  }
}

macro_rules! bail {
  ($msg:literal $(,)?) => {
    return Err(crate::Error::GenericError($msg.into()))
  };
  ($err:expr $(,)?) => {
    return Err(crate::Error::GenericError($err))
  };
  ($fmt:expr, $($arg:tt)*) => {
    return Err(crate::Error::GenericError(format!($fmt, $($arg)*)))
  };
}

pub(crate) use bail;
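A minimal usage sketch of the module above, assuming a hypothetical caller inside the CLI crate (the function and file name are illustrative): `fs_context` preserves the offending path, `context` wraps any `std::error::Error` (or an empty `Option`), and `bail!` produces a `GenericError` and returns early.

use crate::error::{Context, ErrorExt};

fn load_manifest(path: &std::path::Path) -> crate::Result<serde_json::Value> {
  // io::Error -> Error::Fs, carrying the path in the message.
  let raw = std::fs::read_to_string(path)
    .fs_context("failed to read manifest", path.to_path_buf())?;
  if raw.trim().is_empty() {
    // Early return with Error::GenericError, mirroring anyhow::bail!.
    crate::error::bail!("manifest {} is empty", path.display());
  }
  // serde_json::Error -> Error::Context with the given message.
  let value: serde_json::Value =
    serde_json::from_str(&raw).context("failed to parse manifest")?;
  Ok(value)
}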
@ -75,21 +75,13 @@ fn lookup<F: Fn(&PathBuf) -> bool>(dir: &Path, checker: F) -> Option<PathBuf> {
|
||||
}
|
||||
|
||||
fn env_tauri_app_path() -> Option<PathBuf> {
|
||||
std::env::var(ENV_TAURI_APP_PATH)
|
||||
.map(PathBuf::from)
|
||||
.ok()?
|
||||
.canonicalize()
|
||||
.ok()
|
||||
.map(|p| dunce::simplified(&p).to_path_buf())
|
||||
let p = PathBuf::from(std::env::var_os(ENV_TAURI_APP_PATH)?);
|
||||
dunce::canonicalize(p).ok()
|
||||
}
|
||||
|
||||
fn env_tauri_frontend_path() -> Option<PathBuf> {
|
||||
std::env::var(ENV_TAURI_FRONTEND_PATH)
|
||||
.map(PathBuf::from)
|
||||
.ok()?
|
||||
.canonicalize()
|
||||
.ok()
|
||||
.map(|p| dunce::simplified(&p).to_path_buf())
|
||||
let p = PathBuf::from(std::env::var_os(ENV_TAURI_FRONTEND_PATH)?);
|
||||
dunce::canonicalize(p).ok()
|
||||
}
|
||||
|
||||
pub fn resolve_tauri_dir() -> Option<PathBuf> {
|
||||
|
||||
@ -4,7 +4,7 @@
|
||||
|
||||
use std::process::Command;
|
||||
|
||||
use anyhow::Context;
|
||||
use crate::Error;
|
||||
|
||||
#[derive(Debug, Default, Clone, Copy)]
|
||||
pub struct CargoInstallOptions<'a> {
|
||||
@ -41,7 +41,7 @@ pub fn install_one(options: CargoInstallOptions) -> crate::Result<()> {
|
||||
cargo.args(["--branch", branch]);
|
||||
}
|
||||
(None, None, None) => {}
|
||||
_ => anyhow::bail!("Only one of --tag, --rev and --branch can be specified"),
|
||||
_ => crate::error::bail!("Only one of --tag, --rev and --branch can be specified"),
|
||||
};
|
||||
}
|
||||
|
||||
@ -54,9 +54,12 @@ pub fn install_one(options: CargoInstallOptions) -> crate::Result<()> {
|
||||
}
|
||||
|
||||
log::info!("Installing Cargo dependency \"{}\"...", options.name);
|
||||
let status = cargo.status().context("failed to run `cargo add`")?;
|
||||
let status = cargo.status().map_err(|error| Error::CommandFailed {
|
||||
command: "cargo add".to_string(),
|
||||
error,
|
||||
})?;
|
||||
if !status.success() {
|
||||
anyhow::bail!("Failed to install Cargo dependency");
|
||||
crate::error::bail!("Failed to install Cargo dependency");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@ -84,9 +87,12 @@ pub fn uninstall_one(options: CargoUninstallOptions) -> crate::Result<()> {
|
||||
}
|
||||
|
||||
log::info!("Uninstalling Cargo dependency \"{}\"...", options.name);
|
||||
let status = cargo.status().context("failed to run `cargo remove`")?;
|
||||
let status = cargo.status().map_err(|error| Error::CommandFailed {
|
||||
command: "cargo remove".to_string(),
|
||||
error,
|
||||
})?;
|
||||
if !status.success() {
|
||||
anyhow::bail!("Failed to remove Cargo dependency");
|
||||
crate::error::bail!("Failed to remove Cargo dependency");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@ -131,20 +131,7 @@ struct CrateIoGetResponse {
|
||||
pub fn crate_latest_version(name: &str) -> Option<String> {
|
||||
// Reference: https://github.com/rust-lang/crates.io/blob/98c83c8231cbcd15d6b8f06d80a00ad462f71585/src/controllers/krate/metadata.rs#L88
|
||||
let url = format!("https://crates.io/api/v1/crates/{name}?include");
|
||||
#[cfg(feature = "platform-certs")]
|
||||
let mut response = {
|
||||
let agent = ureq::Agent::config_builder()
|
||||
.tls_config(
|
||||
ureq::tls::TlsConfig::builder()
|
||||
.root_certs(ureq::tls::RootCerts::PlatformVerifier)
|
||||
.build(),
|
||||
)
|
||||
.build()
|
||||
.new_agent();
|
||||
agent.get(&url).call().ok()?
|
||||
};
|
||||
#[cfg(not(feature = "platform-certs"))]
|
||||
let mut response = ureq::get(&url).call().ok()?;
|
||||
let mut response = super::http::get(&url).ok()?;
|
||||
let metadata: CrateIoGetResponse =
|
||||
serde_json::from_reader(response.body_mut().as_reader()).unwrap();
|
||||
metadata.krate.default_version
|
||||
|
||||
@ -12,11 +12,13 @@ pub use tauri_utils::{config::*, platform::Target};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
env::{current_dir, set_current_dir, set_var},
|
||||
ffi::OsStr,
|
||||
ffi::{OsStr, OsString},
|
||||
process::exit,
|
||||
sync::{Arc, Mutex, OnceLock},
|
||||
sync::Mutex,
|
||||
};
|
||||
|
||||
use crate::error::Context;
|
||||
|
||||
pub const MERGE_CONFIG_EXTENSION_NAME: &str = "--config";
|
||||
|
||||
pub struct ConfigMetadata {
|
||||
@ -28,7 +30,7 @@ pub struct ConfigMetadata {
|
||||
inner: Config,
|
||||
/// The config extensions (platform-specific config files or the config CLI argument).
|
||||
/// Maps the extension name to its value.
|
||||
extensions: HashMap<String, JsonValue>,
|
||||
extensions: HashMap<OsString, JsonValue>,
|
||||
}
|
||||
|
||||
impl std::ops::Deref for ConfigMetadata {
|
||||
@ -48,7 +50,7 @@ impl ConfigMetadata {
|
||||
}
|
||||
|
||||
/// Checks which config is overwriting the bundle identifier.
|
||||
pub fn find_bundle_identifier_overwriter(&self) -> Option<String> {
|
||||
pub fn find_bundle_identifier_overwriter(&self) -> Option<OsString> {
|
||||
for (ext, config) in &self.extensions {
|
||||
if let Some(identifier) = config
|
||||
.as_object()
|
||||
@ -64,7 +66,7 @@ impl ConfigMetadata {
|
||||
}
|
||||
}
|
||||
|
||||
pub type ConfigHandle = Arc<Mutex<Option<ConfigMetadata>>>;
|
||||
pub type ConfigHandle = &'static Mutex<Option<ConfigMetadata>>;
|
||||
|
||||
pub fn wix_settings(config: WixConfig) -> tauri_bundler::WixSettings {
|
||||
tauri_bundler::WixSettings {
|
||||
@ -139,9 +141,9 @@ pub fn custom_sign_settings(
|
||||
}
|
||||
}
|
||||
|
||||
fn config_handle() -> &'static ConfigHandle {
|
||||
static CONFIG_HANDLE: OnceLock<ConfigHandle> = OnceLock::new();
|
||||
CONFIG_HANDLE.get_or_init(Default::default)
|
||||
fn config_handle() -> ConfigHandle {
|
||||
static CONFIG_HANDLE: Mutex<Option<ConfigMetadata>> = Mutex::new(None);
|
||||
&CONFIG_HANDLE
|
||||
}
|
||||
|
||||
/// Gets the static parsed config from `tauri.conf.json`.
|
||||
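The rewrite above leans on `Mutex::new` being a `const fn`: the config cell can be a plain `static`, so the handle becomes a `&'static Mutex` and the previous `OnceLock`/`Arc` indirection and its `.clone()` calls disappear. A generic sketch of the pattern (not Tauri-specific):

static CACHE: std::sync::Mutex<Option<String>> = std::sync::Mutex::new(None);

fn cache() -> &'static std::sync::Mutex<Option<String>> {
  &CACHE
}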
@ -151,13 +153,14 @@ fn get_internal(
|
||||
target: Target,
|
||||
) -> crate::Result<ConfigHandle> {
|
||||
if !reload && config_handle().lock().unwrap().is_some() {
|
||||
return Ok(config_handle().clone());
|
||||
return Ok(config_handle());
|
||||
}
|
||||
|
||||
let tauri_dir = super::app_paths::tauri_dir();
|
||||
let (mut config, config_path) =
|
||||
tauri_utils::config::parse::parse_value(target, tauri_dir.join("tauri.conf.json"))?;
|
||||
let config_file_name = config_path.file_name().unwrap().to_string_lossy();
|
||||
tauri_utils::config::parse::parse_value(target, tauri_dir.join("tauri.conf.json"))
|
||||
.context("failed to parse config")?;
|
||||
let config_file_name = config_path.file_name().unwrap();
|
||||
let mut extensions = HashMap::new();
|
||||
|
||||
let original_identifier = config
|
||||
@ -167,13 +170,11 @@ fn get_internal(
|
||||
.map(ToString::to_string);
|
||||
|
||||
if let Some((platform_config, config_path)) =
|
||||
tauri_utils::config::parse::read_platform(target, tauri_dir)?
|
||||
tauri_utils::config::parse::read_platform(target, tauri_dir)
|
||||
.context("failed to parse platform config")?
|
||||
{
|
||||
merge(&mut config, &platform_config);
|
||||
extensions.insert(
|
||||
config_path.file_name().unwrap().to_str().unwrap().into(),
|
||||
platform_config,
|
||||
);
|
||||
extensions.insert(config_path.file_name().unwrap().into(), platform_config);
|
||||
}
|
||||
|
||||
if !merge_configs.is_empty() {
|
||||
@ -191,16 +192,17 @@ fn get_internal(
|
||||
if config_path.extension() == Some(OsStr::new("json"))
|
||||
|| config_path.extension() == Some(OsStr::new("json5"))
|
||||
{
|
||||
let schema: JsonValue = serde_json::from_str(include_str!("../../config.schema.json"))?;
|
||||
let schema: JsonValue = serde_json::from_str(include_str!("../../config.schema.json"))
|
||||
.context("failed to parse config schema")?;
|
||||
let validator = jsonschema::validator_for(&schema).expect("Invalid schema");
|
||||
let mut errors = validator.iter_errors(&config).peekable();
|
||||
if errors.peek().is_some() {
|
||||
for error in errors {
|
||||
let path = error.instance_path.into_iter().join(" > ");
|
||||
if path.is_empty() {
|
||||
log::error!("`{}` error: {}", config_file_name, error);
|
||||
log::error!("`{config_file_name:?}` error: {}", error);
|
||||
} else {
|
||||
log::error!("`{}` error on `{}`: {}", config_file_name, path, error);
|
||||
log::error!("`{config_file_name:?}` error on `{}`: {}", path, error);
|
||||
}
|
||||
}
|
||||
if !reload {
|
||||
@ -211,11 +213,11 @@ fn get_internal(
|
||||
|
||||
// the `Config` deserializer for `package > version` can resolve the version from a path relative to the config path
|
||||
// so we actually need to change the current working directory here
|
||||
let current_dir = current_dir()?;
|
||||
set_current_dir(config_path.parent().unwrap())?;
|
||||
let config: Config = serde_json::from_value(config)?;
|
||||
let current_dir = current_dir().context("failed to resolve current directory")?;
|
||||
set_current_dir(config_path.parent().unwrap()).context("failed to set current directory")?;
|
||||
let config: Config = serde_json::from_value(config).context("failed to parse config")?;
|
||||
// revert to previous working directory
|
||||
set_current_dir(current_dir)?;
|
||||
set_current_dir(current_dir).context("failed to set current directory")?;
|
||||
|
||||
for (plugin, conf) in &config.plugins.0 {
|
||||
set_var(
|
||||
@ -223,7 +225,7 @@ fn get_internal(
|
||||
"TAURI_{}_PLUGIN_CONFIG",
|
||||
plugin.to_uppercase().replace('-', "_")
|
||||
),
|
||||
serde_json::to_string(&conf)?,
|
||||
serde_json::to_string(&conf).context("failed to serialize config")?,
|
||||
);
|
||||
}
|
||||
|
||||
@ -238,7 +240,7 @@ fn get_internal(
|
||||
extensions,
|
||||
});
|
||||
|
||||
Ok(config_handle().clone())
|
||||
Ok(config_handle())
|
||||
}
|
||||
|
||||
pub fn get(target: Target, merge_configs: &[&serde_json::Value]) -> crate::Result<ConfigHandle> {
|
||||
@ -254,7 +256,7 @@ pub fn reload(merge_configs: &[&serde_json::Value]) -> crate::Result<ConfigHandl
|
||||
if let Some(target) = target {
|
||||
get_internal(merge_configs, true, target)
|
||||
} else {
|
||||
Err(anyhow::anyhow!("config not loaded"))
|
||||
crate::error::bail!("config not loaded");
|
||||
}
|
||||
}
|
||||
|
||||
@ -263,7 +265,7 @@ pub fn merge_with(merge_configs: &[&serde_json::Value]) -> crate::Result<ConfigH
|
||||
let handle = config_handle();
|
||||
|
||||
if merge_configs.is_empty() {
|
||||
return Ok(handle.clone());
|
||||
return Ok(handle);
|
||||
}
|
||||
|
||||
if let Some(config_metadata) = &mut *handle.lock().unwrap() {
|
||||
@ -275,13 +277,14 @@ pub fn merge_with(merge_configs: &[&serde_json::Value]) -> crate::Result<ConfigH
|
||||
let merge_config_str = serde_json::to_string(&merge_config).unwrap();
|
||||
set_var("TAURI_CONFIG", merge_config_str);
|
||||
|
||||
let mut value = serde_json::to_value(config_metadata.inner.clone())?;
|
||||
let mut value =
|
||||
serde_json::to_value(config_metadata.inner.clone()).context("failed to serialize config")?;
|
||||
merge(&mut value, &merge_config);
|
||||
config_metadata.inner = serde_json::from_value(value)?;
|
||||
config_metadata.inner = serde_json::from_value(value).context("failed to parse config")?;
|
||||
|
||||
Ok(handle.clone())
|
||||
Ok(handle)
|
||||
} else {
|
||||
Err(anyhow::anyhow!("config not loaded"))
|
||||
crate::error::bail!("config not loaded");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -10,8 +10,7 @@ use std::io;
|
||||
use std::io::{Read, Seek, SeekFrom, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::Result;
|
||||
use anyhow::Context as _;
|
||||
use crate::{error::ErrorExt, Error, Result};
|
||||
use sys::*;
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -129,17 +128,25 @@ fn open(path: &Path, opts: &OpenOptions, state: State, msg: &str) -> Result<File
|
||||
// If we want an exclusive lock then if we fail because of NotFound it's
|
||||
// likely because an intermediate directory didn't exist, so try to
|
||||
// create the directory and then continue.
|
||||
let f = opts
|
||||
.open(path)
|
||||
.or_else(|e| {
|
||||
if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {
|
||||
create_dir_all(path.parent().unwrap())?;
|
||||
Ok(opts.open(path)?)
|
||||
} else {
|
||||
Err(anyhow::Error::from(e))
|
||||
}
|
||||
})
|
||||
.with_context(|| format!("failed to open: {}", path.display()))?;
|
||||
let f = opts.open(path).or_else(|e| {
|
||||
if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {
|
||||
create_dir_all(path.parent().unwrap()).fs_context(
|
||||
"failed to create directory",
|
||||
path.parent().unwrap().to_path_buf(),
|
||||
)?;
|
||||
Ok(
|
||||
opts
|
||||
.open(path)
|
||||
.fs_context("failed to open file", path.to_path_buf())?,
|
||||
)
|
||||
} else {
|
||||
Err(Error::Fs {
|
||||
context: "failed to open file",
|
||||
path: path.to_path_buf(),
|
||||
error: e,
|
||||
})
|
||||
}
|
||||
})?;
|
||||
match state {
|
||||
State::Exclusive => {
|
||||
acquire(msg, path, &|| try_lock_exclusive(&f), &|| {
|
||||
@ -203,16 +210,18 @@ fn acquire(
|
||||
|
||||
Err(e) => {
|
||||
if !error_contended(&e) {
|
||||
let e = anyhow::Error::from(e);
|
||||
let cx = format!("failed to lock file: {}", path.display());
|
||||
return Err(e.context(cx));
|
||||
return Err(Error::Fs {
|
||||
context: "failed to lock file",
|
||||
path: path.to_path_buf(),
|
||||
error: e,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
let msg = format!("waiting for file lock on {msg}");
|
||||
log::info!(action = "Blocking"; "{}", &msg);
|
||||
|
||||
lock_block().with_context(|| format!("failed to lock file: {}", path.display()))?;
|
||||
lock_block().fs_context("failed to lock file", path.to_path_buf())?;
|
||||
return Ok(());
|
||||
|
||||
#[cfg(all(target_os = "linux", not(target_env = "musl")))]
|
||||
|
||||
@ -2,20 +2,54 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use anyhow::Result;
|
||||
use std::path::Path;
|
||||
use crate::{
|
||||
error::{Context, ErrorExt},
|
||||
Error,
|
||||
};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
pub fn copy_file(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result<()> {
|
||||
pub fn copy_file(from: impl AsRef<Path>, to: impl AsRef<Path>) -> crate::Result<()> {
|
||||
let from = from.as_ref();
|
||||
let to = to.as_ref();
|
||||
if !from.exists() {
|
||||
return Err(anyhow::anyhow!("{:?} does not exist", from));
|
||||
Err(Error::Fs {
|
||||
context: "failed to copy file",
|
||||
path: from.to_path_buf(),
|
||||
error: std::io::Error::new(std::io::ErrorKind::NotFound, "source does not exist"),
|
||||
})?;
|
||||
}
|
||||
if !from.is_file() {
|
||||
return Err(anyhow::anyhow!("{:?} is not a file", from));
|
||||
Err(Error::Fs {
|
||||
context: "failed to copy file",
|
||||
path: from.to_path_buf(),
|
||||
error: std::io::Error::other("not a file"),
|
||||
})?;
|
||||
}
|
||||
let dest_dir = to.parent().expect("No data in parent");
|
||||
std::fs::create_dir_all(dest_dir)?;
|
||||
std::fs::copy(from, to)?;
|
||||
std::fs::create_dir_all(dest_dir)
|
||||
.fs_context("failed to create directory", dest_dir.to_path_buf())?;
|
||||
std::fs::copy(from, to).fs_context("failed to copy file", from.to_path_buf())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Find an entry in a directory matching a glob pattern.
|
||||
/// Currently does not traverse subdirectories.
|
||||
// currently only used on macOS
|
||||
#[allow(dead_code)]
|
||||
pub fn find_in_directory(path: &Path, glob_pattern: &str) -> crate::Result<PathBuf> {
|
||||
let pattern = glob::Pattern::new(glob_pattern)
|
||||
.with_context(|| format!("failed to parse glob pattern {glob_pattern}"))?;
|
||||
for entry in std::fs::read_dir(path)
|
||||
.with_context(|| format!("failed to read directory {}", path.display()))?
|
||||
{
|
||||
let entry = entry.context("failed to read directory entry")?;
|
||||
if pattern.matches_path(&entry.path()) {
|
||||
return Ok(entry.path());
|
||||
}
|
||||
}
|
||||
crate::error::bail!(
|
||||
"No file found in {} matching {}",
|
||||
path.display(),
|
||||
glob_pattern
|
||||
)
|
||||
}
|
||||
|
||||
26
crates/tauri-cli/src/helpers/http.rs
Normal file
@ -0,0 +1,26 @@
// Copyright 2019-2024 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT

use ureq::{http::Response, Agent, Body};

const CLI_USER_AGENT: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"),);

pub fn get(url: &str) -> Result<Response<Body>, ureq::Error> {
  #[allow(unused_mut)]
  let mut config_builder = ureq::Agent::config_builder()
    .user_agent(CLI_USER_AGENT)
    .proxy(ureq::Proxy::try_from_env());

  #[cfg(feature = "platform-certs")]
  {
    config_builder = config_builder.tls_config(
      ureq::tls::TlsConfig::builder()
        .root_certs(ureq::tls::RootCerts::PlatformVerifier)
        .build(),
    );
  }

  let agent: Agent = config_builder.build().into();
  agent.get(url).call()
}
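A minimal sketch of calling the shared helper, modeled on the `crate_latest_version` call site; the URL and function name are illustrative:

use crate::error::Context;

// Fetch a JSON document through the CLI's shared user agent, proxy settings,
// and (when the feature is enabled) the platform certificate store.
fn fetch_json(url: &str) -> crate::Result<serde_json::Value> {
  let mut response = crate::helpers::http::get(url).context("failed to send request")?;
  serde_json::from_reader(response.body_mut().as_reader())
    .context("failed to parse response body")
}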
@ -9,9 +9,12 @@ pub mod config;
|
||||
pub mod flock;
|
||||
pub mod framework;
|
||||
pub mod fs;
|
||||
pub mod http;
|
||||
pub mod npm;
|
||||
#[cfg(target_os = "macos")]
|
||||
pub mod pbxproj;
|
||||
#[cfg(target_os = "macos")]
|
||||
pub mod plist;
|
||||
pub mod plugins;
|
||||
pub mod prompts;
|
||||
pub mod template;
|
||||
@ -23,9 +26,10 @@ use std::{
|
||||
process::Command,
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use tauri_utils::config::HookCommand;
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
use crate::Error;
|
||||
use crate::{
|
||||
interface::{AppInterface, Interface},
|
||||
CommandExt,
|
||||
@ -97,7 +101,10 @@ pub fn run_hook(
|
||||
.current_dir(cwd)
|
||||
.envs(env)
|
||||
.piped()
|
||||
.with_context(|| format!("failed to run `{script}` with `cmd /C`"))?;
|
||||
.map_err(|error| crate::error::Error::CommandFailed {
|
||||
command: script.clone(),
|
||||
error,
|
||||
})?;
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
let status = Command::new("sh")
|
||||
.arg("-c")
|
||||
@ -105,10 +112,13 @@ pub fn run_hook(
|
||||
.current_dir(cwd)
|
||||
.envs(env)
|
||||
.piped()
|
||||
.with_context(|| format!("failed to run `{script}` with `sh -c`"))?;
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: script.clone(),
|
||||
error,
|
||||
})?;
|
||||
|
||||
if !status.success() {
|
||||
anyhow::bail!(
|
||||
crate::error::bail!(
|
||||
"{} `{}` failed with exit code {}",
|
||||
name,
|
||||
script,
|
||||
@ -122,6 +132,7 @@ pub fn run_hook(
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
pub fn strip_semver_prerelease_tag(version: &mut semver::Version) -> crate::Result<()> {
|
||||
use crate::error::Context;
|
||||
if !version.pre.is_empty() {
|
||||
if let Some((_prerelease_tag, number)) = version.pre.as_str().to_string().split_once('.') {
|
||||
version.pre = semver::Prerelease::EMPTY;
|
||||
@ -133,7 +144,11 @@ pub fn strip_semver_prerelease_tag(version: &mut semver::Version) -> crate::Resu
|
||||
format!(".{}", version.build.as_str())
|
||||
}
|
||||
))
|
||||
.with_context(|| format!("bundle version {number:?} prerelease is invalid"))?;
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"failed to parse {version} as semver: bundle version {number:?} prerelease is invalid"
|
||||
)
|
||||
})?;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -2,10 +2,12 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use anyhow::Context;
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::helpers::cross_command;
|
||||
use crate::{
|
||||
error::{Context, Error},
|
||||
helpers::cross_command,
|
||||
};
|
||||
use std::{collections::HashMap, fmt::Display, path::Path, process::Command};
|
||||
|
||||
pub fn manager_version(package_manager: &str) -> Option<String> {
|
||||
@ -151,10 +153,13 @@ impl PackageManager {
|
||||
let status = command
|
||||
.current_dir(frontend_dir)
|
||||
.status()
|
||||
.with_context(|| format!("failed to run {self}"))?;
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: format!("failed to run {self}"),
|
||||
error,
|
||||
})?;
|
||||
|
||||
if !status.success() {
|
||||
anyhow::bail!("Failed to install NPM {dependencies_str}");
|
||||
crate::error::bail!("Failed to install NPM {dependencies_str}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@ -189,10 +194,13 @@ impl PackageManager {
|
||||
.args(dependencies)
|
||||
.current_dir(frontend_dir)
|
||||
.status()
|
||||
.with_context(|| format!("failed to run {self}"))?;
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: format!("failed to run {self}"),
|
||||
error,
|
||||
})?;
|
||||
|
||||
if !status.success() {
|
||||
anyhow::bail!("Failed to remove NPM {dependencies_str}");
|
||||
crate::error::bail!("Failed to remove NPM {dependencies_str}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@ -211,7 +219,11 @@ impl PackageManager {
|
||||
.arg(name)
|
||||
.args(["--depth", "0"])
|
||||
.current_dir(frontend_dir)
|
||||
.output()?,
|
||||
.output()
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "yarn list --pattern".to_string(),
|
||||
error,
|
||||
})?,
|
||||
None,
|
||||
),
|
||||
PackageManager::YarnBerry => (
|
||||
@ -220,7 +232,11 @@ impl PackageManager {
|
||||
.arg(name)
|
||||
.arg("--json")
|
||||
.current_dir(frontend_dir)
|
||||
.output()?,
|
||||
.output()
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "yarn info --json".to_string(),
|
||||
error,
|
||||
})?,
|
||||
Some(regex::Regex::new("\"Version\":\"([\\da-zA-Z\\-\\.]+)\"").unwrap()),
|
||||
),
|
||||
PackageManager::Pnpm => (
|
||||
@ -229,7 +245,11 @@ impl PackageManager {
|
||||
.arg(name)
|
||||
.args(["--parseable", "--depth", "0"])
|
||||
.current_dir(frontend_dir)
|
||||
.output()?,
|
||||
.output()
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "pnpm list --parseable --depth 0".to_string(),
|
||||
error,
|
||||
})?,
|
||||
None,
|
||||
),
|
||||
// Bun and Deno don't support `list` command
|
||||
@ -239,7 +259,11 @@ impl PackageManager {
|
||||
.arg(name)
|
||||
.args(["version", "--depth", "0"])
|
||||
.current_dir(frontend_dir)
|
||||
.output()?,
|
||||
.output()
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "npm list --version --depth 0".to_string(),
|
||||
error,
|
||||
})?,
|
||||
None,
|
||||
),
|
||||
};
|
||||
@ -270,14 +294,22 @@ impl PackageManager {
|
||||
.args(packages)
|
||||
.args(["--json", "--depth", "0"])
|
||||
.current_dir(frontend_dir)
|
||||
.output()?,
|
||||
.output()
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "pnpm list --json --depth 0".to_string(),
|
||||
error,
|
||||
})?,
|
||||
// Bun and Deno don't support `list` command
|
||||
PackageManager::Npm | PackageManager::Bun | PackageManager::Deno => cross_command("npm")
|
||||
.arg("list")
|
||||
.args(packages)
|
||||
.args(["--json", "--depth", "0"])
|
||||
.current_dir(frontend_dir)
|
||||
.output()?,
|
||||
.output()
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "npm list --json --depth 0".to_string(),
|
||||
error,
|
||||
})?,
|
||||
};
|
||||
|
||||
let mut versions = HashMap::new();
|
||||
@ -300,13 +332,20 @@ impl PackageManager {
|
||||
version: String,
|
||||
}
|
||||
|
||||
let json: ListOutput = serde_json::from_str(&stdout)?;
|
||||
let json = if matches!(self, PackageManager::Pnpm) {
|
||||
serde_json::from_str::<Vec<ListOutput>>(&stdout)
|
||||
.ok()
|
||||
.and_then(|out| out.into_iter().next())
|
||||
.context("failed to parse pnpm list")?
|
||||
} else {
|
||||
serde_json::from_str::<ListOutput>(&stdout).context("failed to parse npm list")?
|
||||
};
|
||||
for (package, dependency) in json.dependencies.into_iter().chain(json.dev_dependencies) {
|
||||
let version = dependency.version;
|
||||
if let Ok(version) = semver::Version::parse(&version) {
|
||||
versions.insert(package, version);
|
||||
} else {
|
||||
log::error!("Failed to parse version `{version}` for NPM package `{package}`");
|
||||
log::debug!("Failed to parse version `{version}` for NPM package `{package}`");
|
||||
}
|
||||
}
|
||||
Ok(versions)
|
||||
@ -322,7 +361,11 @@ fn yarn_package_versions(
|
||||
.args(packages)
|
||||
.args(["--json", "--depth", "0"])
|
||||
.current_dir(frontend_dir)
|
||||
.output()?;
|
||||
.output()
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "yarn list --json --depth 0".to_string(),
|
||||
error,
|
||||
})?;
|
||||
|
||||
let mut versions = HashMap::new();
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
@ -354,7 +397,7 @@ fn yarn_package_versions(
|
||||
if let Ok(version) = semver::Version::parse(version) {
|
||||
versions.insert(name.to_owned(), version);
|
||||
} else {
|
||||
log::error!("Failed to parse version `{version}` for NPM package `{name}`");
|
||||
log::debug!("Failed to parse version `{version}` for NPM package `{name}`");
|
||||
}
|
||||
}
|
||||
return Ok(versions);
|
||||
@ -371,7 +414,11 @@ fn yarn_berry_package_versions(
|
||||
let output = cross_command("yarn")
|
||||
.args(["info", "--json"])
|
||||
.current_dir(frontend_dir)
|
||||
.output()?;
|
||||
.output()
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "yarn info --json".to_string(),
|
||||
error,
|
||||
})?;
|
||||
|
||||
let mut versions = HashMap::new();
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
@ -403,7 +450,7 @@ fn yarn_berry_package_versions(
|
||||
if let Ok(version) = semver::Version::parse(&version) {
|
||||
versions.insert(name.to_owned(), version);
|
||||
} else {
|
||||
log::error!("Failed to parse version `{version}` for NPM package `{name}`");
|
||||
log::debug!("Failed to parse version `{version}` for NPM package `{name}`");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -8,9 +8,12 @@ use std::{
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use crate::error::ErrorExt;
|
||||
|
||||
pub fn parse<P: AsRef<Path>>(path: P) -> crate::Result<Pbxproj> {
|
||||
let path = path.as_ref();
|
||||
let pbxproj = std::fs::read_to_string(path)?;
|
||||
let pbxproj =
|
||||
std::fs::read_to_string(path).fs_context("failed to read pbxproj file", path.to_path_buf())?;
|
||||
|
||||
let mut proj = Pbxproj {
|
||||
path: path.to_owned(),
|
||||
@ -171,7 +174,7 @@ enum State {
|
||||
}
|
||||
|
||||
pub struct Pbxproj {
|
||||
path: PathBuf,
|
||||
pub path: PathBuf,
|
||||
raw_lines: Vec<String>,
|
||||
pub xc_build_configuration: BTreeMap<String, XCBuildConfiguration>,
|
||||
pub xc_configuration_list: BTreeMap<String, XCConfigurationList>,
|
||||
|
||||
42
crates/tauri-cli/src/helpers/plist.rs
Normal file
@ -0,0 +1,42 @@
// Copyright 2019-2024 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT

use std::path::PathBuf;

use crate::error::Context;

pub enum PlistKind {
  Path(PathBuf),
  Plist(plist::Value),
}

impl From<PathBuf> for PlistKind {
  fn from(p: PathBuf) -> Self {
    Self::Path(p)
  }
}
impl From<plist::Value> for PlistKind {
  fn from(p: plist::Value) -> Self {
    Self::Plist(p)
  }
}

pub fn merge_plist(src: Vec<PlistKind>) -> crate::Result<plist::Value> {
  let mut merged_plist = plist::Dictionary::new();

  for plist_kind in src {
    let src_plist = match plist_kind {
      PlistKind::Path(p) => plist::Value::from_file(&p)
        .with_context(|| format!("failed to parse plist from {}", p.display()))?,
      PlistKind::Plist(v) => v,
    };
    if let Some(dict) = src_plist.into_dictionary() {
      for (key, value) in dict {
        merged_plist.insert(key, value);
      }
    }
  }

  Ok(plist::Value::Dictionary(merged_plist))
}
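A sketch of how the helper composes sources, assuming a hypothetical macOS-only caller; the `Info.plist` path and the `CFBundleDisplayName` override are examples only. Later entries win, since each dictionary is inserted over the previous ones:

#[cfg(target_os = "macos")]
fn merged_info_plist() -> crate::Result<plist::Value> {
  use crate::helpers::plist::{merge_plist, PlistKind};

  let mut overrides = plist::Dictionary::new();
  overrides.insert(
    "CFBundleDisplayName".into(),
    plist::Value::String("My App".into()),
  );

  merge_plist(vec![
    PlistKind::Path(std::path::PathBuf::from("Info.plist")),
    PlistKind::Plist(plist::Value::Dictionary(overrides)),
  ])
}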
@ -4,7 +4,7 @@
|
||||
|
||||
use std::{fmt::Display, str::FromStr};
|
||||
|
||||
use crate::Result;
|
||||
use crate::{error::Context, Result};
|
||||
|
||||
pub fn input<T>(
|
||||
prompt: &str,
|
||||
@ -32,7 +32,7 @@ where
|
||||
builder
|
||||
.interact_text()
|
||||
.map(|t: T| if t.ne("") { Some(t) } else { None })
|
||||
.map_err(Into::into)
|
||||
.context("failed to prompt input")
|
||||
}
|
||||
}
|
||||
|
||||
@ -42,7 +42,7 @@ pub fn confirm(prompt: &str, default: Option<bool>) -> Result<bool> {
|
||||
if let Some(default) = default {
|
||||
builder = builder.default(default);
|
||||
}
|
||||
builder.interact().map_err(Into::into)
|
||||
builder.interact().context("failed to prompt confirm")
|
||||
}
|
||||
|
||||
pub fn multiselect<T: ToString>(
|
||||
@ -57,5 +57,5 @@ pub fn multiselect<T: ToString>(
|
||||
if let Some(defaults) = defaults {
|
||||
builder = builder.defaults(defaults);
|
||||
}
|
||||
builder.interact().map_err(Into::into)
|
||||
builder.interact().context("failed to prompt multi-select")
|
||||
}
|
||||
|
||||
@ -13,6 +13,8 @@ use include_dir::Dir;
|
||||
use serde::Serialize;
|
||||
use serde_json::value::{Map, Value as JsonValue};
|
||||
|
||||
use crate::error::ErrorExt;
|
||||
|
||||
/// Map of template variable names and values.
|
||||
#[derive(Clone, Debug)]
|
||||
#[repr(transparent)]
|
||||
@ -74,13 +76,17 @@ pub fn render_with_generator<
|
||||
file_path.set_extension("toml");
|
||||
}
|
||||
}
|
||||
if let Some(mut output_file) = out_file_generator(file_path)? {
|
||||
if let Some(mut output_file) = out_file_generator(file_path.clone())
|
||||
.fs_context("failed to generate output file", file_path.clone())?
|
||||
{
|
||||
if let Some(utf8) = file.contents_utf8() {
|
||||
handlebars
|
||||
.render_template_to_write(utf8, &data, &mut output_file)
|
||||
.expect("Failed to render template");
|
||||
} else {
|
||||
output_file.write_all(file.contents())?;
|
||||
output_file
|
||||
.write_all(file.contents())
|
||||
.fs_context("failed to write template", file_path.clone())?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -2,7 +2,6 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use anyhow::Context;
|
||||
use base64::Engine;
|
||||
use minisign::{
|
||||
sign, KeyPair as KP, PublicKey, PublicKeyBox, SecretKey, SecretKeyBox, SignatureBox,
|
||||
@ -15,6 +14,8 @@ use std::{
|
||||
time::{SystemTime, UNIX_EPOCH},
|
||||
};
|
||||
|
||||
use crate::error::{Context, ErrorExt};
|
||||
|
||||
/// A key pair (`PublicKey` and `SecretKey`).
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct KeyPair {
|
||||
@ -24,9 +25,9 @@ pub struct KeyPair {
|
||||
|
||||
fn create_file(path: &Path) -> crate::Result<BufWriter<File>> {
|
||||
if let Some(parent) = path.parent() {
|
||||
fs::create_dir_all(parent)?;
|
||||
fs::create_dir_all(parent).fs_context("failed to create directory", parent.to_path_buf())?;
|
||||
}
|
||||
let file = File::create(path)?;
|
||||
let file = File::create(path).fs_context("failed to create file", path.to_path_buf())?;
|
||||
Ok(BufWriter::new(file))
|
||||
}
|
||||
|
||||
@ -48,8 +49,12 @@ pub fn generate_key(password: Option<String>) -> crate::Result<KeyPair> {
|
||||
|
||||
/// Transform a base64 String to readable string for the main signer
|
||||
pub fn decode_key<S: AsRef<[u8]>>(base64_key: S) -> crate::Result<String> {
|
||||
let decoded_str = &base64::engine::general_purpose::STANDARD.decode(base64_key)?[..];
|
||||
Ok(String::from(str::from_utf8(decoded_str)?))
|
||||
let decoded_str = &base64::engine::general_purpose::STANDARD
|
||||
.decode(base64_key)
|
||||
.context("failed to decode base64 key")?[..];
|
||||
Ok(String::from(
|
||||
str::from_utf8(decoded_str).context("failed to convert base64 to utf8")?,
|
||||
))
|
||||
}
|
||||
|
||||
/// Save KeyPair to disk
|
||||
@ -69,28 +74,43 @@ where
|
||||
|
||||
if sk_path.exists() {
|
||||
if !force {
|
||||
return Err(anyhow::anyhow!(
|
||||
crate::error::bail!(
|
||||
"Key generation aborted:\n{} already exists\nIf you really want to overwrite the existing key pair, add the --force switch to force this operation.",
|
||||
sk_path.display()
|
||||
));
|
||||
);
|
||||
} else {
|
||||
std::fs::remove_file(sk_path)?;
|
||||
std::fs::remove_file(sk_path)
|
||||
.fs_context("failed to remove secret key file", sk_path.to_path_buf())?;
|
||||
}
|
||||
}
|
||||
|
||||
if pk_path.exists() {
|
||||
std::fs::remove_file(pk_path)?;
|
||||
std::fs::remove_file(pk_path)
|
||||
.fs_context("failed to remove public key file", pk_path.to_path_buf())?;
|
||||
}
|
||||
|
||||
let mut sk_writer = create_file(sk_path)?;
|
||||
write!(sk_writer, "{key:}")?;
|
||||
sk_writer.flush()?;
|
||||
let write_file = |mut writer: BufWriter<File>, contents: &str| -> std::io::Result<()> {
|
||||
write!(writer, "{contents:}")?;
|
||||
writer.flush()?;
|
||||
Ok(())
|
||||
};
|
||||
|
||||
let mut pk_writer = create_file(pk_path)?;
|
||||
write!(pk_writer, "{pubkey:}")?;
|
||||
pk_writer.flush()?;
|
||||
write_file(create_file(sk_path)?, key)
|
||||
.fs_context("failed to write secret key", sk_path.to_path_buf())?;
|
||||
|
||||
Ok((fs::canonicalize(sk_path)?, fs::canonicalize(pk_path)?))
|
||||
write_file(create_file(pk_path)?, pubkey)
|
||||
.fs_context("failed to write public key", pk_path.to_path_buf())?;
|
||||
|
||||
Ok((
|
||||
fs::canonicalize(sk_path).fs_context(
|
||||
"failed to canonicalize secret key path",
|
||||
sk_path.to_path_buf(),
|
||||
)?,
|
||||
fs::canonicalize(pk_path).fs_context(
|
||||
"failed to canonicalize public key path",
|
||||
pk_path.to_path_buf(),
|
||||
)?,
|
||||
))
|
||||
}
|
||||
|
||||
/// Sign files
|
||||
@ -104,8 +124,6 @@ where
|
||||
extension.push(".sig");
|
||||
let signature_path = bin_path.with_extension(extension);
|
||||
|
||||
let mut signature_box_writer = create_file(&signature_path)?;
|
||||
|
||||
let trusted_comment = format!(
|
||||
"timestamp:{}\tfile:{}",
|
||||
unix_timestamp(),
|
||||
@ -120,13 +138,18 @@ where
|
||||
data_reader,
|
||||
Some(trusted_comment.as_str()),
|
||||
Some("signature from tauri secret key"),
|
||||
)?;
|
||||
)
|
||||
.context("failed to sign file")?;
|
||||
|
||||
let encoded_signature =
|
||||
base64::engine::general_purpose::STANDARD.encode(signature_box.to_string());
|
||||
signature_box_writer.write_all(encoded_signature.as_bytes())?;
|
||||
signature_box_writer.flush()?;
|
||||
Ok((fs::canonicalize(&signature_path)?, signature_box))
|
||||
std::fs::write(&signature_path, encoded_signature.as_bytes())
|
||||
.fs_context("failed to write signature file", signature_path.clone())?;
|
||||
Ok((
|
||||
fs::canonicalize(&signature_path)
|
||||
.fs_context("failed to canonicalize signature file", &signature_path)?,
|
||||
signature_box,
|
||||
))
|
||||
}
|
||||
|
||||
/// Gets the updater secret key from the given private key and password.
|
||||
@ -148,7 +171,9 @@ pub fn pub_key<S: AsRef<[u8]>>(public_key: S) -> crate::Result<PublicKey> {
|
||||
let decoded_publick = decode_key(public_key).context("failed to decode base64 pubkey")?;
|
||||
let pk_box =
|
||||
PublicKeyBox::from_string(&decoded_publick).context("failed to load updater pubkey")?;
|
||||
let pk = pk_box.into_public_key()?;
|
||||
let pk = pk_box
|
||||
.into_public_key()
|
||||
.context("failed to convert updater pubkey")?;
|
||||
Ok(pk)
|
||||
}
|
||||
|
||||
@ -168,7 +193,7 @@ where
|
||||
let file = OpenOptions::new()
|
||||
.read(true)
|
||||
.open(data_path)
|
||||
.map_err(|e| minisign::PError::new(minisign::ErrorKind::Io, e))?;
|
||||
.fs_context("failed to open data file", data_path.to_path_buf())?;
|
||||
Ok(BufReader::new(file))
|
||||
}
|
||||
|
||||
@ -176,7 +201,7 @@ where
|
||||
mod tests {
|
||||
const PRIVATE_KEY: &str = "dW50cnVzdGVkIGNvbW1lbnQ6IHJzaWduIGVuY3J5cHRlZCBzZWNyZXQga2V5ClJXUlRZMEl5dkpDN09RZm5GeVAzc2RuYlNzWVVJelJRQnNIV2JUcGVXZUplWXZXYXpqUUFBQkFBQUFBQUFBQUFBQUlBQUFBQTZrN2RnWGh5dURxSzZiL1ZQSDdNcktiaHRxczQwMXdQelRHbjRNcGVlY1BLMTBxR2dpa3I3dDE1UTVDRDE4MXR4WlQwa1BQaXdxKy9UU2J2QmVSNXhOQWFDeG1GSVllbUNpTGJQRkhhTnROR3I5RmdUZi90OGtvaGhJS1ZTcjdZU0NyYzhQWlQ5cGM9Cg==";
|
||||
|
||||
// we use minisign=0.7.3 to prevent a breaking change
|
||||
// minisign >=0.7.4,<0.8.0 couldn't handle empty passwords.
|
||||
#[test]
|
||||
fn empty_password_is_valid() {
|
||||
let path = std::env::temp_dir().join("minisign-password-text.txt");
|
||||
|
||||
@ -2,9 +2,14 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use crate::{helpers::app_paths::tauri_dir, Result};
|
||||
use crate::{
|
||||
error::{Context, Error, ErrorExt},
|
||||
helpers::app_paths::tauri_dir,
|
||||
Result,
|
||||
};
|
||||
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
collections::HashMap,
|
||||
fs::{create_dir_all, File},
|
||||
io::{BufWriter, Write},
|
||||
@ -13,7 +18,6 @@ use std::{
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use clap::Parser;
|
||||
use icns::{IconFamily, IconType};
|
||||
use image::{
|
||||
@ -22,8 +26,9 @@ use image::{
|
||||
png::{CompressionType, FilterType as PngFilterType, PngEncoder},
|
||||
},
|
||||
imageops::FilterType,
|
||||
open, DynamicImage, ExtendedColorType, ImageBuffer, ImageEncoder, Rgba,
|
||||
open, DynamicImage, ExtendedColorType, GenericImageView, ImageBuffer, ImageEncoder, Pixel, Rgba,
|
||||
};
|
||||
use rayon::iter::ParallelIterator;
|
||||
use resvg::{tiny_skia, usvg};
|
||||
use serde::Deserialize;
|
||||
|
||||
@ -40,10 +45,48 @@ struct PngEntry {
|
||||
out_path: PathBuf,
|
||||
}
|
||||
|
||||
enum AndroidIconKind {
|
||||
Regular,
|
||||
Rounded,
|
||||
}
|
||||
|
||||
struct AndroidEntries {
|
||||
icon: Vec<(PngEntry, AndroidIconKind)>,
|
||||
foreground: Vec<PngEntry>,
|
||||
background: Vec<PngEntry>,
|
||||
monochrome: Vec<PngEntry>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Manifest {
|
||||
default: String,
|
||||
bg_color: Option<String>,
|
||||
android_bg: Option<String>,
|
||||
android_fg: Option<String>,
|
||||
android_monochrome: Option<String>,
|
||||
android_fg_scale: Option<f32>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
#[clap(about = "Generate various icons for all major platforms")]
|
||||
pub struct Options {
|
||||
/// Path to the source icon (squared PNG or SVG file with transparency).
|
||||
/// Path to the source icon (squared PNG or SVG file with transparency) or a manifest file.
|
||||
///
|
||||
/// The manifest file is a JSON file with the following structure:
|
||||
/// {
|
||||
/// "default": "app-icon.png",
|
||||
/// "bg_color": "#fff",
|
||||
/// "android_bg": "app-icon-bg.png",
|
||||
/// "android_fg": "app-icon-fg.png",
|
||||
/// "android_fg_scale": 85,
|
||||
/// "android_monochrome": "app-icon-monochrome.png"
|
||||
/// }
|
||||
///
|
||||
/// All file paths defined in the manifest JSON are relative to the manifest file path.
|
||||
///
|
||||
/// Only the `default` manifest property is required.
|
||||
///
|
||||
/// The `bg_color` manifest value overwrites the `--ios-color` option if set.
|
||||
#[clap(default_value = "./app-icon.png")]
|
||||
input: PathBuf,
|
||||
/// Output directory.
|
||||
@ -60,6 +103,7 @@ pub struct Options {
|
||||
ios_color: String,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
enum Source {
|
||||
Svg(resvg::usvg::Tree),
|
||||
@ -81,7 +125,7 @@ impl Source {
|
||||
}
|
||||
}
|
||||
|
||||
fn resize_exact(&self, size: u32) -> Result<DynamicImage> {
|
||||
fn resize_exact(&self, size: u32) -> DynamicImage {
|
||||
match self {
|
||||
Self::Svg(svg) => {
|
||||
let mut pixmap = tiny_skia::Pixmap::new(size, size).unwrap();
|
||||
@ -91,14 +135,105 @@ impl Source {
|
||||
tiny_skia::Transform::from_scale(scale, scale),
|
||||
&mut pixmap.as_mut(),
|
||||
);
|
||||
let img_buffer = ImageBuffer::from_raw(size, size, pixmap.take()).unwrap();
|
||||
Ok(DynamicImage::ImageRgba8(img_buffer))
|
||||
// Switch to use `Pixmap::take_demultiplied` in the future when it's published
|
||||
// https://github.com/linebender/tiny-skia/blob/624257c0feb394bf6c4d0d688f8ea8030aae320f/src/pixmap.rs#L266
|
||||
let img_buffer = ImageBuffer::from_par_fn(size, size, |x, y| {
|
||||
let pixel = pixmap.pixel(x, y).unwrap().demultiply();
|
||||
Rgba([pixel.red(), pixel.green(), pixel.blue(), pixel.alpha()])
|
||||
});
|
||||
DynamicImage::ImageRgba8(img_buffer)
|
||||
}
|
||||
Self::DynamicImage(image) => {
|
||||
// image.resize_exact(size, size, FilterType::Lanczos3)
|
||||
resize_image(image, size, size)
|
||||
}
|
||||
Self::DynamicImage(i) => Ok(i.resize_exact(size, size, FilterType::Lanczos3)),
|
||||
}
|
||||
}
|
||||
}

// `image` does not use premultiplied alpha in resize, so we do it manually here,
// see https://github.com/image-rs/image/issues/1655
fn resize_image(image: &DynamicImage, new_width: u32, new_height: u32) -> DynamicImage {
  // Premultiply alpha
  let premultiplied_image = ImageBuffer::from_par_fn(image.width(), image.height(), |x, y| {
    let mut pixel = image.get_pixel(x, y);
    let alpha = pixel.0[3] as f32 / u8::MAX as f32;
    pixel.apply_without_alpha(|channel_value| (channel_value as f32 * alpha) as u8);
    pixel
  });

  let mut resized = image::imageops::resize(
    &premultiplied_image,
    new_width,
    new_height,
    FilterType::Lanczos3,
  );

  // Demultiply alpha
  resized.par_pixels_mut().for_each(|pixel| {
    let alpha = pixel.0[3] as f32 / u8::MAX as f32;
    pixel.apply_without_alpha(|channel_value| (channel_value as f32 / alpha) as u8);
  });

  DynamicImage::ImageRgba8(resized)
}
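
The premultiply/resize/demultiply round-trip above exists because averaging straight (non-premultiplied) RGBA values lets fully transparent pixels leak their hidden color channels into the result, producing dark halos around edges. A minimal self-contained sketch of the same idea on a single channel (the numbers are illustrative and not taken from this diff):

// Sketch: averaging one opaque white sample with one fully transparent black sample,
// as a single-channel stand-in for what Lanczos resampling does across many pixels.
fn average_straight(a: (f32, f32), b: (f32, f32)) -> (f32, f32) {
  // Straight alpha: the invisible black sample darkens the result (halo artifact).
  ((a.0 + b.0) / 2.0, (a.1 + b.1) / 2.0)
}

fn average_premultiplied(a: (f32, f32), b: (f32, f32)) -> (f32, f32) {
  // Premultiply, average, then demultiply: transparent samples contribute no color.
  let (ca, cb) = (a.0 * a.1, b.0 * b.1);
  let (color, alpha) = ((ca + cb) / 2.0, (a.1 + b.1) / 2.0);
  (if alpha > 0.0 { color / alpha } else { 0.0 }, alpha)
}

fn main() {
  let opaque_white = (1.0, 1.0); // (channel, alpha)
  let transparent_black = (0.0, 0.0);
  assert_eq!(average_straight(opaque_white, transparent_black), (0.5, 0.5));
  assert_eq!(average_premultiplied(opaque_white, transparent_black), (1.0, 0.5));
}

Doing this per pixel with `from_par_fn` / `par_pixels_mut` keeps the fix parallel, which matters for the larger icon sizes.
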
fn read_source(path: PathBuf) -> Result<Source> {
|
||||
if let Some(extension) = path.extension() {
|
||||
if extension == "svg" {
|
||||
let rtree = {
|
||||
let mut fontdb = usvg::fontdb::Database::new();
|
||||
fontdb.load_system_fonts();
|
||||
|
||||
let opt = usvg::Options {
|
||||
// Get file's absolute directory.
|
||||
resources_dir: std::fs::canonicalize(&path)
|
||||
.ok()
|
||||
.and_then(|p| p.parent().map(|p| p.to_path_buf())),
|
||||
fontdb: Arc::new(fontdb),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let svg_data = std::fs::read(&path).fs_context("Failed to read source icon", &path)?;
|
||||
usvg::Tree::from_data(&svg_data, &opt).unwrap()
|
||||
};
|
||||
|
||||
Ok(Source::Svg(rtree))
|
||||
} else {
|
||||
Ok(Source::DynamicImage(DynamicImage::ImageRgba8(
|
||||
open(&path)
|
||||
.context(format!(
|
||||
"failed to read and decode source image {}",
|
||||
path.display()
|
||||
))?
|
||||
.into_rgba8(),
|
||||
)))
|
||||
}
|
||||
} else {
|
||||
crate::error::bail!("Error loading image");
|
||||
}
|
||||
}

fn parse_bg_color(bg_color_string: &String) -> Result<Rgba<u8>> {
  let bg_color = css_color::Srgb::from_str(bg_color_string)
    .map(|color| {
      Rgba([
        (color.red * 255.) as u8,
        (color.green * 255.) as u8,
        (color.blue * 255.) as u8,
        (color.alpha * 255.) as u8,
      ])
    })
    .map_err(|_e| {
      Error::Context(
        format!("failed to parse color {bg_color_string}"),
        "invalid RGBA color".into(),
      )
    })?;

  Ok(bg_color)
}
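
As a quick sanity check of the helper above, a hypothetical test (the color strings are illustrative; anything `css_color::Srgb::from_str` understands, including CSS shorthand hex and named colors, should behave the same way):

#[cfg(test)]
mod bg_color_tests {
  use super::*;

  #[test]
  fn parses_css_colors() {
    // "#fff" expands to opaque white.
    let white = parse_bg_color(&"#fff".to_string()).unwrap();
    assert_eq!(white, Rgba([255, 255, 255, 255]));

    // Invalid input is reported as an error instead of panicking.
    assert!(parse_bg_color(&"not-a-color".to_string()).is_err());
  }
}
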
pub fn command(options: Options) -> Result<()> {
|
||||
let input = options.input;
|
||||
let out_dir = options.output.unwrap_or_else(|| {
|
||||
@ -106,52 +241,34 @@ pub fn command(options: Options) -> Result<()> {
|
||||
tauri_dir().join("icons")
|
||||
});
|
||||
let png_icon_sizes = options.png.unwrap_or_default();
|
||||
let ios_color = css_color::Srgb::from_str(&options.ios_color)
|
||||
.map(|color| {
|
||||
Rgba([
|
||||
(color.red * 255.) as u8,
|
||||
(color.green * 255.) as u8,
|
||||
(color.blue * 255.) as u8,
|
||||
(color.alpha * 255.) as u8,
|
||||
])
|
||||
})
|
||||
.map_err(|_| anyhow::anyhow!("failed to parse iOS color"))?;
|
||||
|
||||
create_dir_all(&out_dir).context("Can't create output directory")?;
|
||||
create_dir_all(&out_dir).fs_context("Can't create output directory", &out_dir)?;
|
||||
|
||||
let source = if let Some(extension) = input.extension() {
|
||||
if extension == "svg" {
|
||||
let rtree = {
|
||||
let mut fontdb = usvg::fontdb::Database::new();
|
||||
fontdb.load_system_fonts();
|
||||
|
||||
let opt = usvg::Options {
|
||||
// Get file's absolute directory.
|
||||
resources_dir: std::fs::canonicalize(&input)
|
||||
.ok()
|
||||
.and_then(|p| p.parent().map(|p| p.to_path_buf())),
|
||||
fontdb: Arc::new(fontdb),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let svg_data = std::fs::read(&input).unwrap();
|
||||
usvg::Tree::from_data(&svg_data, &opt).unwrap()
|
||||
};
|
||||
|
||||
Source::Svg(rtree)
|
||||
} else {
|
||||
Source::DynamicImage(DynamicImage::ImageRgba8(
|
||||
open(&input)
|
||||
.context("Can't read and decode source image")?
|
||||
.into_rgba8(),
|
||||
))
|
||||
}
|
||||
let manifest = if input.extension().is_some_and(|ext| ext == "json") {
|
||||
parse_manifest(&input).map(Some)?
|
||||
} else {
|
||||
anyhow::bail!("Error loading image");
|
||||
None
|
||||
};
|
||||
|
||||
let bg_color_string = match manifest {
|
||||
Some(ref manifest) => manifest
|
||||
.bg_color
|
||||
.as_ref()
|
||||
.unwrap_or(&options.ios_color)
|
||||
.clone(),
|
||||
None => options.ios_color,
|
||||
};
|
||||
let bg_color = parse_bg_color(&bg_color_string)?;
|
||||
|
||||
let default_icon = match manifest {
|
||||
Some(ref manifest) => input.parent().unwrap().join(manifest.default.clone()),
|
||||
None => input.clone(),
|
||||
};
|
||||
|
||||
let source = read_source(default_icon)?;
|
||||
|
||||
if source.height() != source.width() {
|
||||
anyhow::bail!("Source image must be square");
|
||||
crate::error::bail!("Source image must be square");
|
||||
}
|
||||
|
||||
if png_icon_sizes.is_empty() {
|
||||
@ -159,38 +276,49 @@ pub fn command(options: Options) -> Result<()> {
|
||||
icns(&source, &out_dir).context("Failed to generate .icns file")?;
|
||||
ico(&source, &out_dir).context("Failed to generate .ico file")?;
|
||||
|
||||
png(&source, &out_dir, ios_color).context("Failed to generate png icons")?;
|
||||
png(&source, &out_dir, bg_color).context("Failed to generate png icons")?;
|
||||
android(&source, &input, manifest, &bg_color_string, &out_dir)
|
||||
.context("Failed to generate android icons")?;
|
||||
} else {
|
||||
for target in png_icon_sizes
|
||||
.into_iter()
|
||||
.map(|size| {
|
||||
let name = format!("{size}x{size}.png");
|
||||
let out_path = out_dir.join(&name);
|
||||
PngEntry {
|
||||
name,
|
||||
out_path,
|
||||
size,
|
||||
}
|
||||
})
|
||||
.collect::<Vec<PngEntry>>()
|
||||
{
|
||||
for target in png_icon_sizes.into_iter().map(|size| {
|
||||
let name = format!("{size}x{size}.png");
|
||||
let out_path = out_dir.join(&name);
|
||||
PngEntry {
|
||||
name,
|
||||
out_path,
|
||||
size,
|
||||
}
|
||||
}) {
|
||||
log::info!(action = "PNG"; "Creating {}", target.name);
|
||||
resize_and_save_png(&source, target.size, &target.out_path, None)?;
|
||||
resize_and_save_png(&source, target.size, &target.out_path, None, None)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}

fn parse_manifest(manifest_path: &Path) -> Result<Manifest> {
  let manifest: Manifest = serde_json::from_str(
    &std::fs::read_to_string(manifest_path)
      .fs_context("cannot read manifest file", manifest_path)?,
  )
  .context(format!(
    "failed to parse manifest file {}",
    manifest_path.display()
  ))?;
  log::debug!("Read manifest file from {}", manifest_path.display());
  Ok(manifest)
}
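
The `Manifest` type consumed here is defined elsewhere in this file and is not part of the hunk; judging from the fields accessed throughout this change, its shape is roughly the following (a sketch inferred from usage, not the literal definition):

#[derive(serde::Deserialize)]
struct Manifest {
  /// Source icon used for every target unless overridden below.
  default: String,
  /// Background color for iOS icons and the Android adaptive-icon background.
  bg_color: Option<String>,
  /// Optional Android adaptive-icon background image.
  android_bg: Option<String>,
  /// Optional Android adaptive-icon foreground image.
  android_fg: Option<String>,
  /// Foreground scale in percent (e.g. 85) applied when compositing over the background.
  android_fg_scale: Option<f32>,
  /// Optional Android themed (monochrome) icon image.
  android_monochrome: Option<String>,
}
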
fn appx(source: &Source, out_dir: &Path) -> Result<()> {
|
||||
log::info!(action = "Appx"; "Creating StoreLogo.png");
|
||||
resize_and_save_png(source, 50, &out_dir.join("StoreLogo.png"), None)?;
|
||||
resize_and_save_png(source, 50, &out_dir.join("StoreLogo.png"), None, None)?;
|
||||
|
||||
for size in [30, 44, 71, 89, 107, 142, 150, 284, 310] {
|
||||
let file_name = format!("Square{size}x{size}Logo.png");
|
||||
log::info!(action = "Appx"; "Creating {}", file_name);
|
||||
|
||||
resize_and_save_png(source, size, &out_dir.join(&file_name), None)?;
|
||||
resize_and_save_png(source, size, &out_dir.join(&file_name), None, None)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@ -204,27 +332,34 @@ fn icns(source: &Source, out_dir: &Path) -> Result<()> {
|
||||
|
||||
let mut family = IconFamily::new();
|
||||
|
||||
for (name, entry) in entries {
|
||||
for (_name, entry) in entries {
|
||||
let size = entry.size;
|
||||
let mut buf = Vec::new();
|
||||
|
||||
let image = source.resize_exact(size)?;
|
||||
let image = source.resize_exact(size);
|
||||
|
||||
write_png(image.as_bytes(), &mut buf, size)?;
|
||||
write_png(image.as_bytes(), &mut buf, size).context("failed to write output file")?;
|
||||
|
||||
let image = icns::Image::read_png(&buf[..])?;
|
||||
let image = icns::Image::read_png(&buf[..]).context("failed to read output file")?;
|
||||
|
||||
family
|
||||
.add_icon_with_type(
|
||||
&image,
|
||||
IconType::from_ostype(entry.ostype.parse().unwrap()).unwrap(),
|
||||
)
|
||||
.with_context(|| format!("Can't add {name} to Icns Family"))?;
|
||||
.context("failed to add icon to Icns Family")?;
|
||||
}
|
||||
|
||||
let mut out_file = BufWriter::new(File::create(out_dir.join("icon.icns"))?);
|
||||
family.write(&mut out_file)?;
|
||||
out_file.flush()?;
|
||||
let icns_path = out_dir.join("icon.icns");
|
||||
let mut out_file = BufWriter::new(
|
||||
File::create(&icns_path).fs_context("failed to create output file", &icns_path)?,
|
||||
);
|
||||
family
|
||||
.write(&mut out_file)
|
||||
.fs_context("failed to write output file", &icns_path)?;
|
||||
out_file
|
||||
.flush()
|
||||
.fs_context("failed to flush output file", &icns_path)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@ -236,70 +371,54 @@ fn ico(source: &Source, out_dir: &Path) -> Result<()> {
|
||||
let mut frames = Vec::new();
|
||||
|
||||
for size in [32, 16, 24, 48, 64, 256] {
|
||||
let image = source.resize_exact(size)?;
|
||||
let image = source.resize_exact(size);
|
||||
|
||||
// Only the 256px layer can be compressed according to the ico specs.
|
||||
if size == 256 {
|
||||
let mut buf = Vec::new();
|
||||
|
||||
write_png(image.as_bytes(), &mut buf, size)?;
|
||||
write_png(image.as_bytes(), &mut buf, size).context("failed to write output file")?;
|
||||
|
||||
frames.push(IcoFrame::with_encoded(
|
||||
buf,
|
||||
size,
|
||||
size,
|
||||
ExtendedColorType::Rgba8,
|
||||
)?)
|
||||
frames.push(
|
||||
IcoFrame::with_encoded(buf, size, size, ExtendedColorType::Rgba8)
|
||||
.context("failed to create ico frame")?,
|
||||
);
|
||||
} else {
|
||||
frames.push(IcoFrame::as_png(
|
||||
image.as_bytes(),
|
||||
size,
|
||||
size,
|
||||
ExtendedColorType::Rgba8,
|
||||
)?);
|
||||
frames.push(
|
||||
IcoFrame::as_png(image.as_bytes(), size, size, ExtendedColorType::Rgba8)
|
||||
.context("failed to create PNG frame")?,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let mut out_file = BufWriter::new(File::create(out_dir.join("icon.ico"))?);
|
||||
let ico_path = out_dir.join("icon.ico");
|
||||
let mut out_file =
|
||||
BufWriter::new(File::create(&ico_path).fs_context("failed to create output file", &ico_path)?);
|
||||
let encoder = IcoEncoder::new(&mut out_file);
|
||||
encoder.encode_images(&frames)?;
|
||||
out_file.flush()?;
|
||||
encoder
|
||||
.encode_images(&frames)
|
||||
.context("failed to encode images")?;
|
||||
out_file
|
||||
.flush()
|
||||
.fs_context("failed to flush output file", &ico_path)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Generate .png files in 32x32, 64x64, 128x128, 256x256, 512x512 (icon.png)
|
||||
// Main target: Linux
|
||||
fn png(source: &Source, out_dir: &Path, ios_color: Rgba<u8>) -> Result<()> {
|
||||
fn desktop_entries(out_dir: &Path) -> Vec<PngEntry> {
|
||||
let mut entries = Vec::new();
|
||||
|
||||
for size in [32, 64, 128, 256, 512] {
|
||||
let file_name = match size {
|
||||
256 => "128x128@2x.png".to_string(),
|
||||
512 => "icon.png".to_string(),
|
||||
_ => format!("{size}x{size}.png"),
|
||||
};
|
||||
|
||||
entries.push(PngEntry {
|
||||
out_path: out_dir.join(&file_name),
|
||||
name: file_name,
|
||||
size,
|
||||
});
|
||||
}
|
||||
|
||||
entries
|
||||
}
|
||||
|
||||
fn android_entries(out_dir: &Path) -> Result<Vec<PngEntry>> {
|
||||
fn android(
|
||||
source: &Source,
|
||||
input: &Path,
|
||||
manifest: Option<Manifest>,
|
||||
bg_color: &String,
|
||||
out_dir: &Path,
|
||||
) -> Result<()> {
|
||||
fn android_entries(out_dir: &Path) -> Result<AndroidEntries> {
|
||||
struct AndroidEntry {
|
||||
name: &'static str,
|
||||
size: u32,
|
||||
foreground_size: u32,
|
||||
}
|
||||
|
||||
let mut entries = Vec::new();
|
||||
|
||||
let targets = vec![
|
||||
AndroidEntry {
|
||||
name: "hdpi",
|
||||
@ -327,31 +446,241 @@ fn png(source: &Source, out_dir: &Path, ios_color: Rgba<u8>) -> Result<()> {
|
||||
foreground_size: 432,
|
||||
},
|
||||
];
|
||||
let mut icon_entries = Vec::new();
|
||||
let mut fg_entries = Vec::new();
|
||||
let mut bg_entries = Vec::new();
|
||||
let mut monochrome_entries = Vec::new();
|
||||
|
||||
for target in targets {
|
||||
let folder_name = format!("mipmap-{}", target.name);
|
||||
let out_folder = out_dir.join(&folder_name);
|
||||
|
||||
create_dir_all(&out_folder).context("Can't create Android mipmap output directory")?;
|
||||
create_dir_all(&out_folder).fs_context(
|
||||
"failed to create Android mipmap output directory",
|
||||
&out_folder,
|
||||
)?;
|
||||
|
||||
entries.push(PngEntry {
|
||||
fg_entries.push(PngEntry {
|
||||
name: format!("{}/{}", folder_name, "ic_launcher_foreground.png"),
|
||||
out_path: out_folder.join("ic_launcher_foreground.png"),
|
||||
size: target.foreground_size,
|
||||
});
|
||||
entries.push(PngEntry {
|
||||
name: format!("{}/{}", folder_name, "ic_launcher_round.png"),
|
||||
out_path: out_folder.join("ic_launcher_round.png"),
|
||||
size: target.size,
|
||||
icon_entries.push((
|
||||
PngEntry {
|
||||
name: format!("{}/{}", folder_name, "ic_launcher_round.png"),
|
||||
out_path: out_folder.join("ic_launcher_round.png"),
|
||||
size: target.size,
|
||||
},
|
||||
AndroidIconKind::Rounded,
|
||||
));
|
||||
icon_entries.push((
|
||||
PngEntry {
|
||||
name: format!("{}/{}", folder_name, "ic_launcher.png"),
|
||||
out_path: out_folder.join("ic_launcher.png"),
|
||||
size: target.size,
|
||||
},
|
||||
AndroidIconKind::Regular,
|
||||
));
|
||||
|
||||
bg_entries.push(PngEntry {
|
||||
name: format!("{}/{}", folder_name, "ic_launcher_background.png"),
|
||||
out_path: out_folder.join("ic_launcher_background.png"),
|
||||
size: target.foreground_size,
|
||||
});
|
||||
entries.push(PngEntry {
|
||||
name: format!("{}/{}", folder_name, "ic_launcher.png"),
|
||||
out_path: out_folder.join("ic_launcher.png"),
|
||||
size: target.size,
|
||||
|
||||
monochrome_entries.push(PngEntry {
|
||||
name: format!("{}/{}", folder_name, "ic_launcher_monochrome.png"),
|
||||
out_path: out_folder.join("ic_launcher_monochrome.png"),
|
||||
size: target.foreground_size,
|
||||
});
|
||||
}
|
||||
|
||||
Ok(entries)
|
||||
Ok(AndroidEntries {
|
||||
icon: icon_entries,
|
||||
foreground: fg_entries,
|
||||
background: bg_entries,
|
||||
monochrome: monochrome_entries,
|
||||
})
|
||||
}
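
`AndroidEntries` and `AndroidIconKind` are likewise not shown in this hunk; from how `android_entries` fills them and how the loop below consumes them, they look approximately like this (inferred from usage, not copied from the source):

struct AndroidEntries {
  /// Final launcher icons, each paired with the mask variant to apply.
  icon: Vec<(PngEntry, AndroidIconKind)>,
  foreground: Vec<PngEntry>,
  background: Vec<PngEntry>,
  monochrome: Vec<PngEntry>,
}

enum AndroidIconKind {
  /// ic_launcher.png: small rounded corners (~8.33% of the edge length).
  Regular,
  /// ic_launcher_round.png: fully circular mask with a small safe-area margin.
  Rounded,
}
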
|
||||
fn create_color_file(out_dir: &Path, color: &String) -> Result<()> {
|
||||
let values_folder = out_dir.join("values");
|
||||
create_dir_all(&values_folder).fs_context(
|
||||
"Can't create Android values output directory",
|
||||
&values_folder,
|
||||
)?;
|
||||
let launcher_background_xml_path = values_folder.join("ic_launcher_background.xml");
|
||||
let mut color_file = File::create(&launcher_background_xml_path).fs_context(
|
||||
"failed to create Android color file",
|
||||
&launcher_background_xml_path,
|
||||
)?;
|
||||
color_file
|
||||
.write_all(
|
||||
format!(
|
||||
r#"<?xml version="1.0" encoding="utf-8"?>
|
||||
<resources>
|
||||
<color name="ic_launcher_background">{color}</color>
|
||||
</resources>"#,
|
||||
)
|
||||
.as_bytes(),
|
||||
)
|
||||
.fs_context(
|
||||
"failed to write Android color file",
|
||||
&launcher_background_xml_path,
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
let android_out = out_dir
|
||||
.parent()
|
||||
.unwrap()
|
||||
.join("gen/android/app/src/main/res/");
|
||||
let out = if android_out.exists() {
|
||||
android_out
|
||||
} else {
|
||||
let out = out_dir.join("android");
|
||||
create_dir_all(&out).fs_context("Can't create Android output directory", &out)?;
|
||||
out
|
||||
};
|
||||
let entries = android_entries(&out)?;
|
||||
|
||||
let fg_source = match manifest {
|
||||
Some(ref manifest) => {
|
||||
Some(read_source(input.parent().unwrap().join(
|
||||
manifest.android_fg.as_ref().unwrap_or(&manifest.default),
|
||||
))?)
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
for entry in entries.foreground {
|
||||
log::info!(action = "Android"; "Creating {}", entry.name);
|
||||
resize_and_save_png(
|
||||
fg_source.as_ref().unwrap_or(source),
|
||||
entry.size,
|
||||
&entry.out_path,
|
||||
None,
|
||||
None,
|
||||
)?;
|
||||
}
|
||||
|
||||
let mut bg_source = None;
|
||||
let mut has_monochrome_image = false;
|
||||
if let Some(ref manifest) = manifest {
|
||||
if let Some(ref background_path) = manifest.android_bg {
|
||||
let bg = read_source(input.parent().unwrap().join(background_path))?;
|
||||
for entry in entries.background {
|
||||
log::info!(action = "Android"; "Creating {}", entry.name);
|
||||
resize_and_save_png(&bg, entry.size, &entry.out_path, None, None)?;
|
||||
}
|
||||
bg_source.replace(bg);
|
||||
}
|
||||
if let Some(ref monochrome_path) = manifest.android_monochrome {
|
||||
has_monochrome_image = true;
|
||||
let mc = read_source(input.parent().unwrap().join(monochrome_path))?;
|
||||
for entry in entries.monochrome {
|
||||
log::info!(action = "Android"; "Creating {}", entry.name);
|
||||
resize_and_save_png(&mc, entry.size, &entry.out_path, None, None)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (entry, kind) in entries.icon {
|
||||
log::info!(action = "Android"; "Creating {}", entry.name);
|
||||
|
||||
let (margin, radius) = match kind {
|
||||
AndroidIconKind::Regular => {
|
||||
let radius = ((entry.size as f32) * 0.0833).round() as u32;
|
||||
(radius, radius)
|
||||
}
|
||||
AndroidIconKind::Rounded => {
|
||||
let margin = ((entry.size as f32) * 0.04).round() as u32;
|
||||
let radius = ((entry.size as f32) * 0.5).round() as u32;
|
||||
(margin, radius)
|
||||
}
|
||||
};
|
||||
|
||||
let image = if let (Some(bg_source), Some(fg_source)) = (bg_source.as_ref(), fg_source.as_ref())
|
||||
{
|
||||
resize_png(
|
||||
fg_source,
|
||||
entry.size,
|
||||
Some(Background::Image(bg_source)),
|
||||
manifest
|
||||
.as_ref()
|
||||
.and_then(|manifest| manifest.android_fg_scale),
|
||||
)?
|
||||
} else {
|
||||
resize_png(source, entry.size, None, None)?
|
||||
};
|
||||
|
||||
let image = apply_round_mask(&image, entry.size, margin, radius);
|
||||
|
||||
let mut out_file = BufWriter::new(
|
||||
File::create(&entry.out_path).fs_context("failed to create output file", &entry.out_path)?,
|
||||
);
|
||||
write_png(image.as_bytes(), &mut out_file, entry.size)
|
||||
.context("failed to write output file")?;
|
||||
out_file
|
||||
.flush()
|
||||
.fs_context("failed to flush output file", &entry.out_path)?;
|
||||
}
|
||||
|
||||
let mut launcher_content = r#"<?xml version="1.0" encoding="utf-8"?>
|
||||
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
|
||||
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>"#
|
||||
.to_owned();
|
||||
|
||||
if bg_source.is_some() {
|
||||
launcher_content
|
||||
.push_str("\n <background android:drawable=\"@mipmap/ic_launcher_background\"/>");
|
||||
} else {
|
||||
create_color_file(&out, bg_color)?;
|
||||
launcher_content
|
||||
.push_str("\n <background android:drawable=\"@color/ic_launcher_background\"/>");
|
||||
}
|
||||
if has_monochrome_image {
|
||||
launcher_content
|
||||
.push_str("\n <monochrome android:drawable=\"@mipmap/ic_launcher_monochrome\"/>");
|
||||
}
|
||||
launcher_content.push_str("\n</adaptive-icon>");
|
||||
|
||||
let any_dpi_folder = out.join("mipmap-anydpi-v26");
|
||||
create_dir_all(&any_dpi_folder).fs_context(
|
||||
"Can't create Android mipmap-anydpi-v26 output directory",
|
||||
&any_dpi_folder,
|
||||
)?;
|
||||
|
||||
let launcher_xml_path = any_dpi_folder.join("ic_launcher.xml");
|
||||
let mut launcher_file = File::create(&launcher_xml_path)
|
||||
.fs_context("failed to create Android launcher file", &launcher_xml_path)?;
|
||||
launcher_file
|
||||
.write_all(launcher_content.as_bytes())
|
||||
.fs_context("failed to write Android launcher file", &launcher_xml_path)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Generate .png files in 32x32, 64x64, 128x128, 256x256, 512x512 (icon.png)
|
||||
// Main target: Linux
|
||||
fn png(source: &Source, out_dir: &Path, ios_color: Rgba<u8>) -> Result<()> {
|
||||
fn desktop_entries(out_dir: &Path) -> Vec<PngEntry> {
|
||||
let mut entries = Vec::new();
|
||||
|
||||
for size in [32, 64, 128, 256, 512] {
|
||||
let file_name = match size {
|
||||
256 => "128x128@2x.png".to_string(),
|
||||
512 => "icon.png".to_string(),
|
||||
_ => format!("{size}x{size}.png"),
|
||||
};
|
||||
|
||||
entries.push(PngEntry {
|
||||
out_path: out_dir.join(&file_name),
|
||||
name: file_name,
|
||||
size,
|
||||
});
|
||||
}
|
||||
|
||||
entries
|
||||
}
|
||||
|
||||
fn ios_entries(out_dir: &Path) -> Result<Vec<PngEntry>> {
|
||||
@ -428,20 +757,7 @@ fn png(source: &Source, out_dir: &Path, ios_color: Rgba<u8>) -> Result<()> {
|
||||
Ok(entries)
|
||||
}
|
||||
|
||||
let mut entries = desktop_entries(out_dir);
|
||||
|
||||
let android_out = out_dir
|
||||
.parent()
|
||||
.unwrap()
|
||||
.join("gen/android/app/src/main/res/");
|
||||
let out = if android_out.exists() {
|
||||
android_out
|
||||
} else {
|
||||
let out = out_dir.join("android");
|
||||
create_dir_all(&out).context("Can't create Android output directory")?;
|
||||
out
|
||||
};
|
||||
entries.extend(android_entries(&out)?);
|
||||
let entries = desktop_entries(out_dir);
|
||||
|
||||
let ios_out = out_dir
|
||||
.parent()
|
||||
@ -451,46 +767,221 @@ fn png(source: &Source, out_dir: &Path, ios_color: Rgba<u8>) -> Result<()> {
|
||||
ios_out
|
||||
} else {
|
||||
let out = out_dir.join("ios");
|
||||
create_dir_all(&out).context("Can't create iOS output directory")?;
|
||||
create_dir_all(&out).fs_context("failed to create iOS output directory", &out)?;
|
||||
out
|
||||
};
|
||||
|
||||
for entry in entries {
|
||||
log::info!(action = "PNG"; "Creating {}", entry.name);
|
||||
resize_and_save_png(source, entry.size, &entry.out_path, None)?;
|
||||
resize_and_save_png(source, entry.size, &entry.out_path, None, None)?;
|
||||
}
|
||||
|
||||
for entry in ios_entries(&out)? {
|
||||
log::info!(action = "iOS"; "Creating {}", entry.name);
|
||||
resize_and_save_png(source, entry.size, &entry.out_path, Some(ios_color))?;
|
||||
resize_and_save_png(
|
||||
source,
|
||||
entry.size,
|
||||
&entry.out_path,
|
||||
Some(Background::Color(ios_color)),
|
||||
None,
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
enum Background<'a> {
|
||||
Color(Rgba<u8>),
|
||||
Image(&'a Source),
|
||||
}
|
||||
|
||||
// Resize image.
|
||||
fn resize_png(
|
||||
source: &Source,
|
||||
size: u32,
|
||||
bg: Option<Background>,
|
||||
scale_percent: Option<f32>,
|
||||
) -> Result<DynamicImage> {
|
||||
let mut image = source.resize_exact(size);
|
||||
|
||||
match bg {
|
||||
Some(Background::Color(bg_color)) => {
|
||||
let mut bg_img = ImageBuffer::from_fn(size, size, |_, _| bg_color);
|
||||
|
||||
let fg = scale_percent
|
||||
.map(|scale| resize_asset(&image, size, scale))
|
||||
.unwrap_or(image);
|
||||
|
||||
image::imageops::overlay(&mut bg_img, &fg, 0, 0);
|
||||
image = bg_img.into();
|
||||
}
|
||||
Some(Background::Image(bg_source)) => {
|
||||
let mut bg = bg_source.resize_exact(size);
|
||||
|
||||
let fg = scale_percent
|
||||
.map(|scale| resize_asset(&image, size, scale))
|
||||
.unwrap_or(image);
|
||||
|
||||
image::imageops::overlay(&mut bg, &fg, 0, 0);
|
||||
image = bg;
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
|
||||
Ok(image)
|
||||
}
|
||||
|
||||
// Resize image and save it to disk.
|
||||
fn resize_and_save_png(
|
||||
source: &Source,
|
||||
size: u32,
|
||||
file_path: &Path,
|
||||
bg_color: Option<Rgba<u8>>,
|
||||
bg: Option<Background>,
|
||||
scale_percent: Option<f32>,
|
||||
) -> Result<()> {
|
||||
let mut image = source.resize_exact(size)?;
|
||||
|
||||
if let Some(bg_color) = bg_color {
|
||||
let mut bg_img = ImageBuffer::from_fn(size, size, |_, _| bg_color);
|
||||
image::imageops::overlay(&mut bg_img, &image, 0, 0);
|
||||
image = bg_img.into();
|
||||
}
|
||||
|
||||
let mut out_file = BufWriter::new(File::create(file_path)?);
|
||||
write_png(image.as_bytes(), &mut out_file, size)?;
|
||||
Ok(out_file.flush()?)
|
||||
let image = resize_png(source, size, bg, scale_percent)?;
|
||||
let mut out_file =
|
||||
BufWriter::new(File::create(file_path).fs_context("failed to create output file", file_path)?);
|
||||
write_png(image.as_bytes(), &mut out_file, size).context("failed to write output file")?;
|
||||
out_file
|
||||
.flush()
|
||||
.fs_context("failed to save output file", file_path)
|
||||
}
|
||||
|
||||
// Encode image data as png with compression.
|
||||
fn write_png<W: Write>(image_data: &[u8], w: W, size: u32) -> Result<()> {
|
||||
fn write_png<W: Write>(image_data: &[u8], w: W, size: u32) -> image::ImageResult<()> {
|
||||
let encoder = PngEncoder::new_with_quality(w, CompressionType::Best, PngFilterType::Adaptive);
|
||||
encoder.write_image(image_data, size, size, ExtendedColorType::Rgba8)?;
|
||||
Ok(())
|
||||
}
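
A minimal usage sketch for the encoder helper above, writing a single red pixel into an in-memory buffer (it matches the new `image::ImageResult` return type introduced in this change; the function name is hypothetical):

fn encode_single_pixel() -> image::ImageResult<Vec<u8>> {
  // One RGBA pixel (red, fully opaque) encoded as a 1x1 PNG.
  let pixel: [u8; 4] = [255, 0, 0, 255];
  let mut buf = Vec::new();
  write_png(&pixel, &mut buf, 1)?;
  Ok(buf)
}
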
|
||||
|
||||
// finds the bounding box of non-transparent pixels in an RGBA image.
|
||||
fn content_bounds(img: &DynamicImage) -> Option<(u32, u32, u32, u32)> {
|
||||
let rgba = img.to_rgba8();
|
||||
let (width, height) = img.dimensions();
|
||||
|
||||
let mut min_x = width;
|
||||
let mut min_y = height;
|
||||
let mut max_x = 0;
|
||||
let mut max_y = 0;
|
||||
let mut found = false;
|
||||
|
||||
for y in 0..height {
|
||||
for x in 0..width {
|
||||
let a = rgba.get_pixel(x, y)[3];
|
||||
if a > 0 {
|
||||
found = true;
|
||||
if x < min_x {
|
||||
min_x = x;
|
||||
}
|
||||
if y < min_y {
|
||||
min_y = y;
|
||||
}
|
||||
if x > max_x {
|
||||
max_x = x;
|
||||
}
|
||||
if y > max_y {
|
||||
max_y = y;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if found {
|
||||
Some((min_x, min_y, max_x - min_x + 1, max_y - min_y + 1))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
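
For illustration, the expected behaviour of `content_bounds` on a tiny synthetic image (a hypothetical test, not taken from the diff):

#[test]
fn bounds_of_single_opaque_pixel() {
  // 4x4 fully transparent canvas with one opaque pixel at (2, 1).
  let mut img = image::RgbaImage::from_pixel(4, 4, Rgba([0, 0, 0, 0]));
  img.put_pixel(2, 1, Rgba([255, 0, 0, 255]));
  let bounds = content_bounds(&DynamicImage::ImageRgba8(img));
  // Bounding box: (x, y, width, height).
  assert_eq!(bounds, Some((2, 1, 1, 1)));
}
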
|
||||
|
||||
fn resize_asset(img: &DynamicImage, target_size: u32, scale_percent: f32) -> DynamicImage {
|
||||
let cropped = if let Some((x, y, cw, ch)) = content_bounds(img) {
|
||||
// TODO: Use `&` here instead when we raise MSRV to above 1.79
|
||||
Cow::Owned(img.crop_imm(x, y, cw, ch))
|
||||
} else {
|
||||
Cow::Borrowed(img)
|
||||
};
|
||||
|
||||
let (cw, ch) = cropped.dimensions();
|
||||
let max_dim = cw.max(ch) as f32;
|
||||
let scale = (target_size as f32 * (scale_percent / 100.0)) / max_dim;
|
||||
|
||||
let new_w = (cw as f32 * scale).round() as u32;
|
||||
let new_h = (ch as f32 * scale).round() as u32;
|
||||
|
||||
let resized = resize_image(&cropped, new_w, new_h);
|
||||
|
||||
// Place on transparent square canvas
|
||||
let mut canvas = ImageBuffer::from_pixel(target_size, target_size, Rgba([0, 0, 0, 0]));
|
||||
let offset_x = if new_w > target_size {
|
||||
// Image wider than canvas → start at negative offset
|
||||
-((new_w - target_size) as i32 / 2)
|
||||
} else {
|
||||
(target_size - new_w) as i32 / 2
|
||||
};
|
||||
|
||||
let offset_y = if new_h > target_size {
|
||||
-((new_h - target_size) as i32 / 2)
|
||||
} else {
|
||||
(target_size - new_h) as i32 / 2
|
||||
};
|
||||
|
||||
image::imageops::overlay(&mut canvas, &resized, offset_x.into(), offset_y.into());
|
||||
|
||||
DynamicImage::ImageRgba8(canvas)
|
||||
}
|
||||
|
||||
fn apply_round_mask(
|
||||
img: &DynamicImage,
|
||||
target_size: u32,
|
||||
margin: u32,
|
||||
radius: u32,
|
||||
) -> DynamicImage {
|
||||
// Clamp radius to half of inner size
|
||||
let inner_size = target_size.saturating_sub(2 * margin);
|
||||
let radius = radius.min(inner_size / 2);
|
||||
|
||||
// Resize inner image to fit inside margins
|
||||
let resized = img.resize_exact(inner_size, inner_size, image::imageops::Lanczos3);
|
||||
|
||||
// Prepare output canvas
|
||||
let mut out = ImageBuffer::from_pixel(target_size, target_size, Rgba([0, 0, 0, 0]));
|
||||
|
||||
// Draw the resized image at (margin, margin)
|
||||
image::imageops::overlay(&mut out, &resized, margin as i64, margin as i64);
|
||||
|
||||
// Apply rounded corners
|
||||
for y in 0..target_size {
|
||||
for x in 0..target_size {
|
||||
let inside = if x >= margin + radius
|
||||
&& x < target_size - margin - radius
|
||||
&& y >= margin + radius
|
||||
&& y < target_size - margin - radius
|
||||
{
|
||||
true // inside central rectangle
|
||||
} else {
|
||||
// Determine corner centers
|
||||
let (cx, cy) = if x < margin + radius && y < margin + radius {
|
||||
(margin + radius, margin + radius) // top-left
|
||||
} else if x >= target_size - margin - radius && y < margin + radius {
|
||||
(target_size - margin - radius, margin + radius) // top-right
|
||||
} else if x < margin + radius && y >= target_size - margin - radius {
|
||||
(margin + radius, target_size - margin - radius) // bottom-left
|
||||
} else if x >= target_size - margin - radius && y >= target_size - margin - radius {
|
||||
(target_size - margin - radius, target_size - margin - radius) // bottom-right
|
||||
} else {
|
||||
continue; // edges that are not corners are inside
|
||||
};
|
||||
let dx = x as i32 - cx as i32;
|
||||
let dy = y as i32 - cy as i32;
|
||||
dx * dx + dy * dy <= (radius as i32 * radius as i32)
|
||||
};
|
||||
|
||||
if !inside {
|
||||
out.put_pixel(x, y, Rgba([0, 0, 0, 0]));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
DynamicImage::ImageRgba8(out)
|
||||
}
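
To put concrete numbers on the mask parameters chosen in `android()`: for a hypothetical 192px launcher icon, the regular variant gets a 16px corner radius (and the same 16px margin), while the round variant gets an 8px margin and a nominal 96px radius that the clamp above reduces to half of the 176px inner size, i.e. 88px, yielding a circle. A small sketch reproducing that arithmetic (reusing the `AndroidIconKind` sketch from earlier):

fn mask_params(size: u32, kind: AndroidIconKind) -> (u32, u32) {
  // Mirrors the (margin, radius) selection in `android()` above.
  match kind {
    AndroidIconKind::Regular => {
      let radius = ((size as f32) * 0.0833).round() as u32;
      (radius, radius)
    }
    AndroidIconKind::Rounded => {
      let margin = ((size as f32) * 0.04).round() as u32;
      let radius = ((size as f32) * 0.5).round() as u32;
      (margin, radius)
    }
  }
}

fn main() {
  assert_eq!(mask_params(192, AndroidIconKind::Regular), (16, 16));
  assert_eq!(mask_params(192, AndroidIconKind::Rounded), (8, 96)); // clamped to 88 later
}
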
|
||||
|
||||
@ -3,6 +3,8 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use super::{SectionItem, Status};
|
||||
#[cfg(windows)]
|
||||
use crate::error::Context;
|
||||
use colored::Colorize;
|
||||
#[cfg(windows)]
|
||||
use serde::Deserialize;
|
||||
@ -45,7 +47,11 @@ fn build_tools_version() -> crate::Result<Vec<String>> {
|
||||
"json",
|
||||
"-utf8",
|
||||
])
|
||||
.output()?;
|
||||
.output()
|
||||
.map_err(|error| crate::error::Error::CommandFailed {
|
||||
command: "vswhere -prerelease -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -requires Microsoft.VisualStudio.Component.Windows10SDK.* -format json -utf8".to_string(),
|
||||
error,
|
||||
})?;
|
||||
|
||||
let output_sdk11 = Command::new(vswhere)
|
||||
.args([
|
||||
@ -60,19 +66,25 @@ fn build_tools_version() -> crate::Result<Vec<String>> {
|
||||
"json",
|
||||
"-utf8",
|
||||
])
|
||||
.output()?;
|
||||
.output()
|
||||
.map_err(|error| crate::error::Error::CommandFailed {
|
||||
command: "vswhere -prerelease -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -requires Microsoft.VisualStudio.Component.Windows11SDK.* -format json -utf8".to_string(),
|
||||
error,
|
||||
})?;
|
||||
|
||||
let mut instances: Vec<VsInstanceInfo> = Vec::new();
|
||||
|
||||
if output_sdk10.status.success() {
|
||||
let stdout = String::from_utf8_lossy(&output_sdk10.stdout);
|
||||
let found: Vec<VsInstanceInfo> = serde_json::from_str(&stdout)?;
|
||||
let found: Vec<VsInstanceInfo> =
|
||||
serde_json::from_str(&stdout).context("failed to parse vswhere output")?;
|
||||
instances.extend(found);
|
||||
}
|
||||
|
||||
if output_sdk11.status.success() {
|
||||
let stdout = String::from_utf8_lossy(&output_sdk11.stdout);
|
||||
let found: Vec<VsInstanceInfo> = serde_json::from_str(&stdout)?;
|
||||
let found: Vec<VsInstanceInfo> =
|
||||
serde_json::from_str(&stdout).context("failed to parse vswhere output")?;
|
||||
instances.extend(found);
|
||||
}
|
||||
|
||||
@ -97,7 +109,11 @@ fn webview2_version() -> crate::Result<Option<String>> {
|
||||
let output = Command::new(&powershell_path)
|
||||
.args(["-NoProfile", "-Command"])
|
||||
.arg("Get-ItemProperty -Path 'HKLM:\\SOFTWARE\\WOW6432Node\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}")
|
||||
.output()?;
|
||||
.output()
|
||||
.map_err(|error| crate::error::Error::CommandFailed {
|
||||
command: "Get-ItemProperty -Path 'HKLM:\\SOFTWARE\\WOW6432Node\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}".to_string(),
|
||||
error,
|
||||
})?;
|
||||
if output.status.success() {
|
||||
return Ok(Some(
|
||||
String::from_utf8_lossy(&output.stdout).replace('\n', ""),
|
||||
@ -107,7 +123,11 @@ fn webview2_version() -> crate::Result<Option<String>> {
|
||||
let output = Command::new(&powershell_path)
|
||||
.args(["-NoProfile", "-Command"])
|
||||
.arg("Get-ItemProperty -Path 'HKLM:\\SOFTWARE\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}")
|
||||
.output()?;
|
||||
.output()
|
||||
.map_err(|error| crate::error::Error::CommandFailed {
|
||||
command: "Get-ItemProperty -Path 'HKLM:\\SOFTWARE\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}".to_string(),
|
||||
error,
|
||||
})?;
|
||||
if output.status.success() {
|
||||
return Ok(Some(
|
||||
String::from_utf8_lossy(&output.stdout).replace('\n', ""),
|
||||
@ -117,7 +137,11 @@ fn webview2_version() -> crate::Result<Option<String>> {
|
||||
let output = Command::new(&powershell_path)
|
||||
.args(["-NoProfile", "-Command"])
|
||||
.arg("Get-ItemProperty -Path 'HKCU:\\SOFTWARE\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}")
|
||||
.output()?;
|
||||
.output()
|
||||
.map_err(|error| crate::error::Error::CommandFailed {
|
||||
command: "Get-ItemProperty -Path 'HKCU:\\SOFTWARE\\Microsoft\\EdgeUpdate\\Clients\\{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}' | ForEach-Object {$_.pv}".to_string(),
|
||||
error,
|
||||
})?;
|
||||
if output.status.success() {
|
||||
return Ok(Some(
|
||||
String::from_utf8_lossy(&output.stdout).replace('\n', ""),
|
||||
|
||||
@ -3,6 +3,7 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use crate::{
|
||||
error::Context,
|
||||
helpers::app_paths::{resolve_frontend_dir, resolve_tauri_dir},
|
||||
Result,
|
||||
};
|
||||
@ -37,7 +38,7 @@ pub struct VersionMetadata {
|
||||
|
||||
fn version_metadata() -> Result<VersionMetadata> {
|
||||
serde_json::from_str::<VersionMetadata>(include_str!("../../metadata-v2.json"))
|
||||
.map_err(Into::into)
|
||||
.context("failed to parse version metadata")
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default)]
|
||||
@ -317,8 +318,17 @@ pub fn command(options: Options) -> Result<()> {
|
||||
.extend(app::items(frontend_dir.as_ref(), tauri_dir.as_deref()));
|
||||
|
||||
environment.display();
|
||||
|
||||
packages.display();
|
||||
|
||||
plugins.display();
|
||||
|
||||
if let (Some(frontend_dir), Some(tauri_dir)) = (&frontend_dir, &tauri_dir) {
|
||||
if let Err(error) = plugins::check_mismatched_packages(frontend_dir, tauri_dir) {
|
||||
println!("\n{}: {error}", "Error".bright_red().bold());
|
||||
}
|
||||
}
|
||||
|
||||
app.display();
|
||||
|
||||
// iOS
|
||||
|
||||
@ -8,7 +8,11 @@ use colored::Colorize;
|
||||
use serde::Deserialize;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::helpers::{cross_command, npm::PackageManager};
|
||||
use crate::error::Context;
|
||||
use crate::{
|
||||
error::Error,
|
||||
helpers::{cross_command, npm::PackageManager},
|
||||
};
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct YarnVersionInfo {
|
||||
@ -24,10 +28,15 @@ pub fn npm_latest_version(pm: &PackageManager, name: &str) -> crate::Result<Opti
|
||||
.arg("info")
|
||||
.arg(name)
|
||||
.args(["version", "--json"])
|
||||
.output()?;
|
||||
.output()
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "yarn info --json".to_string(),
|
||||
error,
|
||||
})?;
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
let info: YarnVersionInfo = serde_json::from_str(&stdout)?;
|
||||
let info: YarnVersionInfo =
|
||||
serde_json::from_str(&stdout).context("failed to parse yarn info")?;
|
||||
Ok(Some(info.data.last().unwrap().to_string()))
|
||||
} else {
|
||||
Ok(None)
|
||||
@ -41,10 +50,14 @@ pub fn npm_latest_version(pm: &PackageManager, name: &str) -> crate::Result<Opti
|
||||
.arg("info")
|
||||
.arg(name)
|
||||
.args(["--fields", "version", "--json"])
|
||||
.output()?;
|
||||
.output()
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "yarn npm info --fields version --json".to_string(),
|
||||
error,
|
||||
})?;
|
||||
if output.status.success() {
|
||||
let info: crate::PackageJson =
|
||||
serde_json::from_reader(std::io::Cursor::new(output.stdout)).unwrap();
|
||||
let info: crate::PackageJson = serde_json::from_reader(std::io::Cursor::new(output.stdout))
|
||||
.context("failed to parse yarn npm info")?;
|
||||
Ok(info.version)
|
||||
} else {
|
||||
Ok(None)
|
||||
@ -54,7 +67,15 @@ pub fn npm_latest_version(pm: &PackageManager, name: &str) -> crate::Result<Opti
|
||||
PackageManager::Npm | PackageManager::Deno | PackageManager::Bun => {
|
||||
let mut cmd = cross_command("npm");
|
||||
|
||||
let output = cmd.arg("show").arg(name).arg("version").output()?;
|
||||
let output = cmd
|
||||
.arg("show")
|
||||
.arg(name)
|
||||
.arg("version")
|
||||
.output()
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "npm show --version".to_string(),
|
||||
error,
|
||||
})?;
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
Ok(Some(stdout.replace('\n', "")))
|
||||
@ -65,7 +86,15 @@ pub fn npm_latest_version(pm: &PackageManager, name: &str) -> crate::Result<Opti
|
||||
PackageManager::Pnpm => {
|
||||
let mut cmd = cross_command("pnpm");
|
||||
|
||||
let output = cmd.arg("info").arg(name).arg("version").output()?;
|
||||
let output = cmd
|
||||
.arg("info")
|
||||
.arg(name)
|
||||
.arg("version")
|
||||
.output()
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "pnpm info --version".to_string(),
|
||||
error,
|
||||
})?;
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
Ok(Some(stdout.replace('\n', "")))
|
||||
@ -140,12 +169,12 @@ pub fn nodejs_section_item(
|
||||
.unwrap_or_default();
|
||||
|
||||
if version.is_empty() {
|
||||
format!("{} {}: not installed!", package, "".green())
|
||||
format!("{} {}: not installed!", package, " ⱼₛ".black().on_yellow())
|
||||
} else {
|
||||
format!(
|
||||
"{} {}: {}{}",
|
||||
package,
|
||||
"".dimmed(),
|
||||
" ⱼₛ".black().on_yellow(),
|
||||
version,
|
||||
if !(version.is_empty() || latest_ver.is_empty()) {
|
||||
let version = semver::Version::parse(version.as_str()).unwrap();
|
||||
|
||||
@ -8,14 +8,16 @@ use std::{
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use crate::helpers::{
|
||||
self,
|
||||
cargo_manifest::{cargo_manifest_and_lock, crate_version},
|
||||
npm::PackageManager,
|
||||
use crate::{
|
||||
helpers::{
|
||||
self,
|
||||
cargo_manifest::{cargo_manifest_and_lock, crate_version},
|
||||
npm::PackageManager,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
|
||||
use super::{packages_nodejs, packages_rust, SectionItem};
|
||||
use anyhow::anyhow;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct InstalledPackage {
|
||||
@ -70,7 +72,10 @@ pub fn installed_tauri_packages(
|
||||
crate_version(tauri_dir, manifest.as_ref(), lock.as_ref(), crate_name).version?;
|
||||
let crate_version = semver::Version::parse(&crate_version)
|
||||
.inspect_err(|_| {
|
||||
log::error!("Failed to parse version `{crate_version}` for crate `{crate_name}`");
|
||||
// On first run there's no lockfile yet, so we get the version requirement from Cargo.toml.
// In our templates that's `2`, which is not a valid semver version but a version requirement.
// `log::error` confused users, so we use `log::debug` to still be able to see this error if needed.
|
||||
log::debug!("Failed to parse version `{crate_version}` for crate `{crate_name}`");
|
||||
})
|
||||
.ok()?;
|
||||
Some((crate_name.clone(), crate_version))
|
||||
@ -161,5 +166,5 @@ pub fn check_mismatched_packages(frontend_dir: &Path, tauri_path: &Path) -> crat
|
||||
)
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
Err(anyhow!("Found version mismatched Tauri packages. Make sure the NPM and crate versions are on the same major/minor releases:\n{mismatched_text}"))
|
||||
Err(Error::GenericError(format!("Found version mismatched Tauri packages. Make sure the NPM package and Rust crate versions are on the same major/minor releases:\n{mismatched_text}")))
|
||||
}
|
||||
|
||||
@ -17,8 +17,10 @@ use std::{
|
||||
path::PathBuf,
|
||||
};
|
||||
|
||||
use crate::Result;
|
||||
use anyhow::Context;
|
||||
use crate::{
|
||||
error::{Context, ErrorExt},
|
||||
Result,
|
||||
};
|
||||
use clap::Parser;
|
||||
use handlebars::{to_json, Handlebars};
|
||||
use include_dir::{include_dir, Dir};
|
||||
@ -76,8 +78,10 @@ impl Options {
|
||||
let package_json_path = PathBuf::from(&self.directory).join("package.json");
|
||||
|
||||
let init_defaults = if package_json_path.exists() {
|
||||
let package_json_text = read_to_string(package_json_path)?;
|
||||
let package_json: crate::PackageJson = serde_json::from_str(&package_json_text)?;
|
||||
let package_json_text =
|
||||
read_to_string(&package_json_path).fs_context("failed to read", &package_json_path)?;
|
||||
let package_json: crate::PackageJson =
|
||||
serde_json::from_str(&package_json_text).context("failed to parse JSON")?;
|
||||
let (framework, _) = infer_framework(&package_json_text);
|
||||
InitDefaults {
|
||||
app_name: package_json.product_name.or(package_json.name),
|
||||
@ -187,7 +191,8 @@ pub fn command(mut options: Options) -> Result<()> {
|
||||
options = options.load()?;
|
||||
|
||||
let template_target_path = PathBuf::from(&options.directory).join("src-tauri");
|
||||
let metadata = serde_json::from_str::<VersionMetadata>(include_str!("../metadata-v2.json"))?;
|
||||
let metadata = serde_json::from_str::<VersionMetadata>(include_str!("../metadata-v2.json"))
|
||||
.context("failed to parse version metadata")?;
|
||||
|
||||
if template_target_path.exists() && !options.force {
|
||||
log::warn!(
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use anyhow::Result;
|
||||
use crate::Result;
|
||||
use clap::{Parser, Subcommand};
|
||||
|
||||
use crate::interface::{AppInterface, AppSettings, Interface};
|
||||
|
||||
@ -11,16 +11,14 @@ use std::{
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use crate::helpers::config::Config;
|
||||
use anyhow::Context;
|
||||
use crate::{error::Context, helpers::config::Config};
|
||||
use tauri_bundler::bundle::{PackageType, Settings, SettingsBuilder};
|
||||
|
||||
pub use rust::{MobileOptions, Options, Rust as AppInterface};
|
||||
pub use rust::{MobileOptions, Options, Rust as AppInterface, WatcherOptions};
|
||||
|
||||
pub trait DevProcess {
|
||||
fn kill(&self) -> std::io::Result<()>;
|
||||
fn try_wait(&self) -> std::io::Result<Option<ExitStatus>>;
|
||||
// TODO:
|
||||
#[allow(unused)]
|
||||
fn wait(&self) -> std::io::Result<ExitStatus>;
|
||||
#[allow(unused)]
|
||||
@ -36,7 +34,7 @@ pub trait AppSettings {
|
||||
features: &[String],
|
||||
) -> crate::Result<tauri_bundler::BundleSettings>;
|
||||
fn app_binary_path(&self, options: &Options) -> crate::Result<PathBuf>;
|
||||
fn get_binaries(&self) -> crate::Result<Vec<tauri_bundler::BundleBinary>>;
|
||||
fn get_binaries(&self, options: &Options) -> crate::Result<Vec<tauri_bundler::BundleBinary>>;
|
||||
fn app_name(&self) -> Option<String>;
|
||||
fn lib_name(&self) -> Option<String>;
|
||||
|
||||
@ -56,10 +54,10 @@ pub trait AppSettings {
|
||||
let target: String = if let Some(target) = options.target.clone() {
|
||||
target
|
||||
} else {
|
||||
tauri_utils::platform::target_triple()?
|
||||
tauri_utils::platform::target_triple().context("failed to get target triple")?
|
||||
};
|
||||
|
||||
let mut bins = self.get_binaries()?;
|
||||
let mut bins = self.get_binaries(&options)?;
|
||||
if let Some(main_binary_name) = &config.main_binary_name {
|
||||
let main = bins.iter_mut().find(|b| b.main()).context("no main bin?")?;
|
||||
main.set_name(main_binary_name.to_owned());
|
||||
@ -81,7 +79,10 @@ pub trait AppSettings {
|
||||
)
|
||||
}
|
||||
|
||||
settings_builder.build().map_err(Into::into)
|
||||
settings_builder
|
||||
.build()
|
||||
.map_err(Box::new)
|
||||
.map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
@ -112,4 +113,9 @@ pub trait Interface: Sized {
|
||||
options: MobileOptions,
|
||||
runner: R,
|
||||
) -> crate::Result<()>;
|
||||
fn watch<R: Fn() -> crate::Result<Box<dyn DevProcess + Send>>>(
|
||||
&mut self,
|
||||
options: WatcherOptions,
|
||||
runner: R,
|
||||
) -> crate::Result<()>;
|
||||
}
|
||||
|
||||
@ -14,7 +14,6 @@ use std::{
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use dunce::canonicalize;
|
||||
use glob::glob;
|
||||
use ignore::gitignore::{Gitignore, GitignoreBuilder};
|
||||
@ -30,6 +29,7 @@ use tauri_utils::config::{parse::is_configuration_file, DeepLinkProtocol, Runner
|
||||
|
||||
use super::{AppSettings, DevProcess, ExitReason, Interface};
|
||||
use crate::{
|
||||
error::{Context, Error, ErrorExt},
|
||||
helpers::{
|
||||
app_paths::{frontend_dir, tauri_dir},
|
||||
config::{nsis_settings, reload as reload_config, wix_settings, BundleResources, Config},
|
||||
@ -115,6 +115,12 @@ pub struct MobileOptions {
|
||||
pub additional_watch_folders: Vec<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct WatcherOptions {
|
||||
pub config: Vec<ConfigValue>,
|
||||
pub additional_watch_folders: Vec<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct RustupTarget {
|
||||
name: String,
|
||||
@ -140,7 +146,14 @@ impl Interface for Rust {
|
||||
}
|
||||
})
|
||||
.unwrap();
|
||||
watcher.watch(tauri_dir().join("Cargo.toml"), RecursiveMode::NonRecursive)?;
|
||||
watcher
|
||||
.watch(tauri_dir().join("Cargo.toml"), RecursiveMode::NonRecursive)
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"failed to watch {}",
|
||||
tauri_dir().join("Cargo.toml").display()
|
||||
)
|
||||
})?;
|
||||
let (manifest, modified) = rewrite_manifest(config)?;
|
||||
if modified {
|
||||
// Wait for the modified event so we don't trigger a re-build later on
|
||||
@ -202,8 +215,8 @@ impl Interface for Rust {
|
||||
if options.no_watch {
|
||||
let (tx, rx) = sync_channel(1);
|
||||
self.run_dev(options, run_args, move |status, reason| {
|
||||
on_exit(status, reason);
|
||||
tx.send(()).unwrap();
|
||||
on_exit(status, reason)
|
||||
})?;
|
||||
|
||||
rx.recv().unwrap();
|
||||
@ -238,12 +251,26 @@ impl Interface for Rust {
|
||||
runner(options)?;
|
||||
Ok(())
|
||||
} else {
|
||||
let merge_configs = options.config.iter().map(|c| &c.0).collect::<Vec<_>>();
|
||||
let run = Arc::new(|_rust: &mut Rust| runner(options.clone()));
|
||||
self.run_dev_watcher(&options.additional_watch_folders, &merge_configs, run)
|
||||
self.watch(
|
||||
WatcherOptions {
|
||||
config: options.config.clone(),
|
||||
additional_watch_folders: options.additional_watch_folders.clone(),
|
||||
},
|
||||
move || runner(options.clone()),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn watch<R: Fn() -> crate::Result<Box<dyn DevProcess + Send>>>(
|
||||
&mut self,
|
||||
options: WatcherOptions,
|
||||
runner: R,
|
||||
) -> crate::Result<()> {
|
||||
let merge_configs = options.config.iter().map(|c| &c.0).collect::<Vec<_>>();
|
||||
let run = Arc::new(|_rust: &mut Rust| runner());
|
||||
self.run_dev_watcher(&options.additional_watch_folders, &merge_configs, run)
|
||||
}
|
||||
|
||||
fn env(&self) -> HashMap<&str, String> {
|
||||
let mut env = HashMap::new();
|
||||
env.insert(
|
||||
@ -411,9 +438,9 @@ fn dev_options(
|
||||
// Copied from https://github.com/rust-lang/cargo/blob/69255bb10de7f74511b5cef900a9d102247b6029/src/cargo/core/workspace.rs#L665
|
||||
fn expand_member_path(path: &Path) -> crate::Result<Vec<PathBuf>> {
|
||||
let path = path.to_str().context("path is not UTF-8 compatible")?;
|
||||
let res = glob(path).with_context(|| format!("could not parse pattern `{path}`"))?;
|
||||
let res = glob(path).with_context(|| format!("failed to expand glob pattern for {path}"))?;
|
||||
let res = res
|
||||
.map(|p| p.with_context(|| format!("unable to match path to pattern `{path}`")))
|
||||
.map(|p| p.with_context(|| format!("failed to expand glob pattern for {path}")))
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
Ok(res)
|
||||
}
|
||||
@ -574,7 +601,7 @@ impl Rust {
|
||||
);
|
||||
|
||||
let mut p = process.lock().unwrap();
|
||||
p.kill().with_context(|| "failed to kill app process")?;
|
||||
p.kill().context("failed to kill app process")?;
|
||||
|
||||
// wait for the process to exit
|
||||
// note that on mobile, kill() already waits for the process to exit (duct implementation)
|
||||
@ -622,18 +649,19 @@ impl<T> MaybeWorkspace<T> {
|
||||
fn resolve(
|
||||
self,
|
||||
label: &str,
|
||||
get_ws_field: impl FnOnce() -> anyhow::Result<T>,
|
||||
) -> anyhow::Result<T> {
|
||||
get_ws_field: impl FnOnce() -> crate::Result<T>,
|
||||
) -> crate::Result<T> {
|
||||
match self {
|
||||
MaybeWorkspace::Defined(value) => Ok(value),
|
||||
MaybeWorkspace::Workspace(TomlWorkspaceField { workspace: true }) => {
|
||||
get_ws_field().context(format!(
|
||||
"error inheriting `{label}` from workspace root manifest's `workspace.package.{label}`"
|
||||
))
|
||||
}
|
||||
MaybeWorkspace::Workspace(TomlWorkspaceField { workspace: false }) => Err(anyhow::anyhow!(
|
||||
"`workspace=false` is unsupported for `package.{label}`"
|
||||
)),
|
||||
MaybeWorkspace::Workspace(TomlWorkspaceField { workspace: true }) => get_ws_field()
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"error inheriting `{label}` from workspace root manifest's `workspace.package.{label}`"
|
||||
)
|
||||
}),
|
||||
MaybeWorkspace::Workspace(TomlWorkspaceField { workspace: false }) => Err(
|
||||
crate::Error::GenericError("`workspace=false` is unsupported for `package.{label}`".into()),
|
||||
),
|
||||
}
|
||||
}
|
||||
fn _as_defined(&self) -> Option<&T> {
|
||||
@ -667,11 +695,13 @@ struct WorkspacePackageSettings {
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
struct BinarySettings {
|
||||
name: String,
|
||||
/// This is from nightly: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#different-binary-name
|
||||
filename: Option<String>,
|
||||
path: Option<String>,
|
||||
required_features: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
impl BinarySettings {
|
||||
@ -721,8 +751,11 @@ impl CargoSettings {
|
||||
fn load(dir: &Path) -> crate::Result<Self> {
|
||||
let toml_path = dir.join("Cargo.toml");
|
||||
let toml_str = std::fs::read_to_string(&toml_path)
|
||||
.with_context(|| format!("Failed to read {}", toml_path.display()))?;
|
||||
toml::from_str(&toml_str).with_context(|| format!("Failed to parse {}", toml_path.display()))
|
||||
.fs_context("Failed to read Cargo manifest", toml_path.clone())?;
|
||||
toml::from_str(&toml_str).context(format!(
|
||||
"failed to parse Cargo manifest at {}",
|
||||
toml_path.display()
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
@ -754,7 +787,7 @@ pub struct UpdaterConfig {
|
||||
}
|
||||
|
||||
/// Install modes for the Windows update.
|
||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||
#[derive(Default, Debug, PartialEq, Eq, Clone)]
|
||||
pub enum WindowsUpdateInstallMode {
|
||||
/// Specifies there's a basic UI during the installation process, including a final dialog box at the end.
|
||||
BasicUi,
|
||||
@ -762,17 +795,12 @@ pub enum WindowsUpdateInstallMode {
|
||||
/// Requires admin privileges if the installer does.
|
||||
Quiet,
|
||||
/// Specifies unattended mode, which means the installation only shows a progress bar.
|
||||
#[default]
|
||||
Passive,
|
||||
// to add more modes, we need to check if the updater relaunch makes sense
|
||||
// i.e. for a full UI mode, the user can also mark the installer to start the app
|
||||
}
|
||||
|
||||
impl Default for WindowsUpdateInstallMode {
|
||||
fn default() -> Self {
|
||||
Self::Passive
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for WindowsUpdateInstallMode {
|
||||
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
|
||||
where
|
||||
@ -831,11 +859,10 @@ impl AppSettings for RustAppSettings {
|
||||
.plugins
|
||||
.0
|
||||
.get("updater")
|
||||
.ok_or_else(|| {
|
||||
anyhow::anyhow!("failed to get updater configuration: plugins > updater doesn't exist")
|
||||
})?
|
||||
.context("failed to get updater configuration: plugins > updater doesn't exist")?
|
||||
.clone(),
|
||||
)?;
|
||||
)
|
||||
.context("failed to parse updater plugin configuration")?;
|
||||
Some(UpdaterSettings {
|
||||
v1_compatible,
|
||||
pubkey: updater.pubkey,
|
||||
@ -848,7 +875,7 @@ impl AppSettings for RustAppSettings {
|
||||
let mut settings = tauri_config_to_bundle_settings(
|
||||
self,
|
||||
features,
|
||||
config.identifier.clone(),
|
||||
config,
|
||||
config.bundle.clone(),
|
||||
updater_settings,
|
||||
arch64bits,
|
||||
@ -862,7 +889,8 @@ impl AppSettings for RustAppSettings {
|
||||
.get("deep-link")
|
||||
.and_then(|c| c.get("desktop").cloned())
|
||||
{
|
||||
let protocols: DesktopDeepLinks = serde_json::from_value(plugin_config)?;
|
||||
let protocols: DesktopDeepLinks =
|
||||
serde_json::from_value(plugin_config).context("failed to parse desktop deep links from Tauri configuration > plugins > deep-link > desktop")?;
|
||||
settings.deep_link_protocols = Some(match protocols {
|
||||
DesktopDeepLinks::One(p) => vec![p],
|
||||
DesktopDeepLinks::List(p) => p,
|
||||
@ -893,7 +921,7 @@ impl AppSettings for RustAppSettings {
|
||||
}
|
||||
|
||||
fn app_binary_path(&self, options: &Options) -> crate::Result<PathBuf> {
|
||||
let binaries = self.get_binaries()?;
|
||||
let binaries = self.get_binaries(options)?;
|
||||
let bin_name = binaries
|
||||
.iter()
|
||||
.find(|x| x.main())
|
||||
@ -919,8 +947,8 @@ impl AppSettings for RustAppSettings {
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
fn get_binaries(&self) -> crate::Result<Vec<BundleBinary>> {
|
||||
let mut binaries: Vec<BundleBinary> = vec![];
|
||||
fn get_binaries(&self, options: &Options) -> crate::Result<Vec<BundleBinary>> {
|
||||
let mut binaries = Vec::new();
|
||||
|
||||
if let Some(bins) = &self.cargo_settings.bin {
|
||||
let default_run = self
|
||||
@ -929,6 +957,14 @@ impl AppSettings for RustAppSettings {
|
||||
.clone()
|
||||
.unwrap_or_default();
|
||||
for bin in bins {
|
||||
if let (Some(req_features), Some(opt_features)) =
|
||||
(&bin.required_features, &options.features)
|
||||
{
|
||||
// Check if all required features are enabled.
|
||||
if !req_features.iter().all(|feat| opt_features.contains(feat)) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
let file_name = bin.file_name();
|
||||
let is_main = file_name == self.cargo_package_settings.name || file_name == default_run;
|
||||
binaries.push(BundleBinary::with_path(
|
||||
@ -1034,18 +1070,18 @@ impl AppSettings for RustAppSettings {
|
||||
impl RustAppSettings {
|
||||
pub fn new(config: &Config, manifest: Manifest, target: Option<String>) -> crate::Result<Self> {
|
||||
let tauri_dir = tauri_dir();
|
||||
let cargo_settings = CargoSettings::load(tauri_dir).context("failed to load cargo settings")?;
|
||||
let cargo_settings = CargoSettings::load(tauri_dir).context("failed to load Cargo settings")?;
|
||||
let cargo_package_settings = match &cargo_settings.package {
|
||||
Some(package_info) => package_info.clone(),
|
||||
None => {
|
||||
return Err(anyhow::anyhow!(
|
||||
return Err(crate::Error::GenericError(
|
||||
"No package info in the config file".to_owned(),
|
||||
))
|
||||
}
|
||||
};
|
||||
|
||||
let ws_package_settings = CargoSettings::load(&get_workspace_dir()?)
|
||||
.context("failed to load cargo settings from workspace root")?
|
||||
.context("failed to load Cargo settings from workspace root")?
|
||||
.workspace
|
||||
.and_then(|v| v.package);
|
||||
|
||||
@ -1058,7 +1094,7 @@ impl RustAppSettings {
|
||||
ws_package_settings
|
||||
.as_ref()
|
||||
.and_then(|p| p.version.clone())
|
||||
.ok_or_else(|| anyhow::anyhow!("Couldn't inherit value for `version` from workspace"))
|
||||
.context("Couldn't inherit value for `version` from workspace")
|
||||
})
|
||||
.expect("Cargo project does not have a version")
|
||||
});
|
||||
@ -1078,9 +1114,7 @@ impl RustAppSettings {
|
||||
ws_package_settings
|
||||
.as_ref()
|
||||
.and_then(|v| v.description.clone())
|
||||
.ok_or_else(|| {
|
||||
anyhow::anyhow!("Couldn't inherit value for `description` from workspace")
|
||||
})
|
||||
.context("Couldn't inherit value for `description` from workspace")
|
||||
})
|
||||
.unwrap()
|
||||
})
|
||||
@ -1091,9 +1125,7 @@ impl RustAppSettings {
|
||||
ws_package_settings
|
||||
.as_ref()
|
||||
.and_then(|v| v.homepage.clone())
|
||||
.ok_or_else(|| {
|
||||
anyhow::anyhow!("Couldn't inherit value for `homepage` from workspace")
|
||||
})
|
||||
.context("Couldn't inherit value for `homepage` from workspace")
|
||||
})
|
||||
.unwrap()
|
||||
}),
|
||||
@ -1103,7 +1135,7 @@ impl RustAppSettings {
|
||||
ws_package_settings
|
||||
.as_ref()
|
||||
.and_then(|v| v.authors.clone())
|
||||
.ok_or_else(|| anyhow::anyhow!("Couldn't inherit value for `authors` from workspace"))
|
||||
.context("Couldn't inherit value for `authors` from workspace")
|
||||
})
|
||||
.unwrap()
|
||||
}),
|
||||
@ -1168,16 +1200,20 @@ pub(crate) fn get_cargo_metadata() -> crate::Result<CargoMetadata> {
|
||||
let output = Command::new("cargo")
|
||||
.args(["metadata", "--no-deps", "--format-version", "1"])
|
||||
.current_dir(tauri_dir())
|
||||
.output()?;
|
||||
.output()
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "cargo metadata --no-deps --format-version 1".to_string(),
|
||||
error,
|
||||
})?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(anyhow::anyhow!(
|
||||
"cargo metadata command exited with a non zero exit code: {}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
));
|
||||
return Err(Error::CommandFailed {
|
||||
command: "cargo metadata".to_string(),
|
||||
error: std::io::Error::other(String::from_utf8_lossy(&output.stderr)),
|
||||
});
|
||||
}
|
||||
|
||||
Ok(serde_json::from_slice(&output.stdout)?)
|
||||
serde_json::from_slice(&output.stdout).context("failed to parse cargo metadata")
|
||||
}
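The rewritten `get_cargo_metadata` maps spawn failures to the CLI's `CommandFailed` error and surfaces cargo's stderr on a non-zero exit. The same flow with plain `std`, `serde` and `serde_json`; the `Metadata` struct below is a minimal stand-in, not the crate's `CargoMetadata`:

use serde::Deserialize;
use std::path::PathBuf;
use std::process::Command;

// Minimal stand-in for the fields the bundler actually needs.
#[derive(Deserialize)]
struct Metadata {
  target_directory: PathBuf,
}

fn cargo_metadata() -> Result<Metadata, String> {
  // `cargo metadata --no-deps --format-version 1` prints workspace metadata as JSON.
  let output = Command::new("cargo")
    .args(["metadata", "--no-deps", "--format-version", "1"])
    .output()
    .map_err(|e| format!("failed to spawn `cargo metadata`: {e}"))?;

  if !output.status.success() {
    // Surface cargo's stderr instead of a bare exit-code error.
    return Err(String::from_utf8_lossy(&output.stderr).into_owned());
  }

  serde_json::from_slice(&output.stdout).map_err(|e| format!("failed to parse cargo metadata: {e}"))
}

fn main() {
  match cargo_metadata() {
    Ok(metadata) => println!("target dir: {}", metadata.target_directory.display()),
    Err(error) => eprintln!("{error}"),
  }
}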
/// Get the cargo target directory based on the provided arguments.
|
||||
@ -1185,10 +1221,12 @@ pub(crate) fn get_cargo_metadata() -> crate::Result<CargoMetadata> {
|
||||
/// Otherwise, use the target directory from cargo metadata.
|
||||
pub(crate) fn get_cargo_target_dir(args: &[String]) -> crate::Result<PathBuf> {
|
||||
let path = if let Some(target) = get_cargo_option(args, "--target-dir") {
|
||||
std::env::current_dir()?.join(target)
|
||||
std::env::current_dir()
|
||||
.context("failed to get current directory")?
|
||||
.join(target)
|
||||
} else {
|
||||
get_cargo_metadata()
|
||||
.with_context(|| "failed to run 'cargo metadata' command to get target directory")?
|
||||
.context("failed to run 'cargo metadata' command to get target directory")?
|
||||
.target_directory
|
||||
};
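`get_cargo_target_dir` relies on a `get_cargo_option` helper that is referenced but not shown in this diff. A hypothetical version that understands both `--target-dir <path>` and `--target-dir=<path>` might look like the following (the name and exact behaviour are assumptions, not the actual helper):

/// Hypothetical: find the value of a cargo flag inside a raw argument list,
/// accepting both `--flag value` and `--flag=value` forms.
fn get_cargo_option<'a>(args: &'a [String], option: &str) -> Option<&'a str> {
  let mut iter = args.iter();
  while let Some(arg) = iter.next() {
    if arg == option {
      // `--target-dir some/path`
      return iter.next().map(|s| s.as_str());
    }
    if let Some(value) = arg.strip_prefix(option).and_then(|rest| rest.strip_prefix('=')) {
      // `--target-dir=some/path`
      return Some(value);
    }
  }
  None
}

fn main() {
  let args = vec!["build".to_string(), "--target-dir=custom/target".to_string()];
  assert_eq!(get_cargo_option(&args, "--target-dir"), Some("custom/target"));
}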
@ -1250,7 +1288,7 @@ pub fn get_profile_dir(options: &Options) -> &str {
|
||||
fn tauri_config_to_bundle_settings(
|
||||
settings: &RustAppSettings,
|
||||
features: &[String],
|
||||
identifier: String,
|
||||
tauri_config: &Config,
|
||||
config: crate::helpers::config::BundleConfig,
|
||||
updater_config: Option<UpdaterSettings>,
|
||||
arch64bits: bool,
|
||||
@ -1373,8 +1411,59 @@ fn tauri_config_to_bundle_settings(
|
||||
BundleResources::Map(map) => (None, Some(map)),
|
||||
};
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
let entitlements = if let Some(plugin_config) = tauri_config
|
||||
.plugins
|
||||
.0
|
||||
.get("deep-link")
|
||||
.and_then(|c| c.get("desktop").cloned())
|
||||
{
|
||||
let protocols: DesktopDeepLinks =
|
||||
serde_json::from_value(plugin_config).context("failed to parse deep link plugin config")?;
|
||||
let domains = match protocols {
|
||||
DesktopDeepLinks::One(protocol) => protocol.domains,
|
||||
DesktopDeepLinks::List(protocols) => protocols.into_iter().flat_map(|p| p.domains).collect(),
|
||||
};
|
||||
|
||||
if domains.is_empty() {
|
||||
config
|
||||
.macos
|
||||
.entitlements
|
||||
.map(PathBuf::from)
|
||||
.map(tauri_bundler::bundle::Entitlements::Path)
|
||||
} else {
|
||||
let mut app_links_entitlements = plist::Dictionary::new();
|
||||
app_links_entitlements.insert(
|
||||
"com.apple.developer.associated-domains".to_string(),
|
||||
domains
|
||||
.into_iter()
|
||||
.map(|domain| format!("applinks:{domain}").into())
|
||||
.collect::<Vec<_>>()
|
||||
.into(),
|
||||
);
|
||||
let entitlements = if let Some(user_provided_entitlements) = config.macos.entitlements {
|
||||
crate::helpers::plist::merge_plist(vec![
|
||||
PathBuf::from(user_provided_entitlements).into(),
|
||||
plist::Value::Dictionary(app_links_entitlements).into(),
|
||||
])?
|
||||
} else {
|
||||
app_links_entitlements.into()
|
||||
};
|
||||
|
||||
Some(tauri_bundler::bundle::Entitlements::Plist(entitlements))
|
||||
}
|
||||
} else {
|
||||
config
|
||||
.macos
|
||||
.entitlements
|
||||
.map(PathBuf::from)
|
||||
.map(tauri_bundler::bundle::Entitlements::Path)
|
||||
};
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
let entitlements = None;
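The macOS branch above converts configured deep-link domains into a `com.apple.developer.associated-domains` entitlement and merges it with any user-provided entitlements file via the CLI's own `merge_plist` helper. A reduced sketch of just the dictionary construction with the `plist` crate (the merge step is left out):

use plist::{Dictionary, Value};

/// Build the associated-domains entitlement for a list of deep-link domains,
/// e.g. `applinks:example.com`.
fn app_links_entitlements(domains: &[&str]) -> Value {
  let mut entitlements = Dictionary::new();
  entitlements.insert(
    "com.apple.developer.associated-domains".to_string(),
    Value::Array(
      domains
        .iter()
        .map(|domain| Value::String(format!("applinks:{domain}")))
        .collect(),
    ),
  );
  Value::Dictionary(entitlements)
}

fn main() {
  let value = app_links_entitlements(&["example.com", "app.example.com"]);
  // Write the entitlements as an XML plist, the format codesign expects.
  value
    .to_writer_xml(std::io::stdout())
    .expect("failed to serialize entitlements plist");
}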
Ok(BundleSettings {
|
||||
identifier: Some(identifier),
|
||||
identifier: Some(tauri_config.identifier.clone()),
|
||||
publisher: config.publisher,
|
||||
homepage: config.homepage,
|
||||
icon: Some(config.icon),
|
||||
@ -1383,8 +1472,8 @@ fn tauri_config_to_bundle_settings(
|
||||
copyright: config.copyright,
|
||||
category: match config.category {
|
||||
Some(category) => Some(AppCategory::from_str(&category).map_err(|e| match e {
|
||||
Some(e) => anyhow::anyhow!("invalid category, did you mean `{}`?", e),
|
||||
None => anyhow::anyhow!("invalid category"),
|
||||
Some(e) => Error::GenericError(format!("invalid category, did you mean `{e}`?")),
|
||||
None => Error::GenericError("invalid category".to_string()),
|
||||
})?),
|
||||
None => None,
|
||||
},
|
||||
@ -1474,14 +1563,24 @@ fn tauri_config_to_bundle_settings(
|
||||
skip_stapling: false,
|
||||
hardened_runtime: config.macos.hardened_runtime,
|
||||
provider_short_name,
|
||||
entitlements: config.macos.entitlements,
|
||||
info_plist_path: {
|
||||
entitlements,
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
info_plist: None,
|
||||
#[cfg(target_os = "macos")]
|
||||
info_plist: {
|
||||
let mut src_plists = vec![];
|
||||
|
||||
let path = tauri_dir().join("Info.plist");
|
||||
if path.exists() {
|
||||
Some(path)
|
||||
} else {
|
||||
None
|
||||
src_plists.push(path.into());
|
||||
}
|
||||
if let Some(info_plist) = &config.macos.info_plist {
|
||||
src_plists.push(info_plist.clone().into());
|
||||
}
|
||||
|
||||
Some(tauri_bundler::bundle::PlistKind::Plist(
|
||||
crate::helpers::plist::merge_plist(src_plists)?,
|
||||
))
|
||||
},
|
||||
},
|
||||
windows: WindowsSettings {
|
||||
@ -1508,9 +1607,7 @@ fn tauri_config_to_bundle_settings(
|
||||
.cargo_ws_package_settings
|
||||
.as_ref()
|
||||
.and_then(|v| v.license.clone())
|
||||
.ok_or_else(|| {
|
||||
anyhow::anyhow!("Couldn't inherit value for `license` from workspace")
|
||||
})
|
||||
.context("Couldn't inherit value for `license` from workspace")
|
||||
})
|
||||
.unwrap()
|
||||
})
|
||||
@ -1572,7 +1669,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn parse_cargo_option() {
|
||||
let args = vec![
|
||||
let args = [
|
||||
"build".into(),
|
||||
"--".into(),
|
||||
"--profile".into(),
|
||||
|
||||
@ -2,7 +2,6 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use serde::Deserialize;
|
||||
use std::{
|
||||
fs,
|
||||
@ -11,6 +10,11 @@ use std::{
|
||||
|
||||
use tauri_utils::display_path;
|
||||
|
||||
use crate::{
|
||||
error::{Context, ErrorExt},
|
||||
Result,
|
||||
};
|
||||
|
||||
struct PathAncestors<'a> {
|
||||
current: Option<&'a Path>,
|
||||
}
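`PathAncestors` drives the `.cargo/config.toml` discovery loop a few hunks below; its `Iterator` impl is unchanged by this diff and not shown, so the following is only a plausible reconstruction of walking from a directory up to the filesystem root (the real implementation may differ, for example by also visiting `CARGO_HOME`):

use std::path::Path;

struct PathAncestors<'a> {
  current: Option<&'a Path>,
}

impl<'a> PathAncestors<'a> {
  fn new(path: &'a Path) -> Self {
    Self { current: Some(path) }
  }
}

impl<'a> Iterator for PathAncestors<'a> {
  type Item = &'a Path;

  fn next(&mut self) -> Option<&'a Path> {
    // Yield the current directory, then step up to its parent for the next call.
    let current = self.current.take()?;
    self.current = current.parent();
    Some(current)
  }
}

fn main() {
  // Walk from a project directory up to the root, the order in which
  // `.cargo/config.toml` files would be searched for.
  for dir in PathAncestors::new(Path::new("/home/user/project/src-tauri")) {
    println!("{}", dir.display());
  }
}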
@ -57,18 +61,12 @@ impl Config {
|
||||
let mut config = Self::default();
|
||||
|
||||
let get_config = |path: PathBuf| -> Result<ConfigSchema> {
|
||||
let contents = fs::read_to_string(&path).with_context(|| {
|
||||
format!(
|
||||
"failed to read configuration file `{}`",
|
||||
display_path(&path)
|
||||
)
|
||||
})?;
|
||||
toml::from_str(&contents).with_context(|| {
|
||||
format!(
|
||||
"could not parse TOML configuration in `{}`",
|
||||
display_path(&path)
|
||||
)
|
||||
})
|
||||
let contents =
|
||||
fs::read_to_string(&path).fs_context("failed to read configuration file", path.clone())?;
|
||||
toml::from_str(&contents).context(format!(
|
||||
"could not parse TOML configuration in `{}`",
|
||||
display_path(&path)
|
||||
))
|
||||
};
|
||||
|
||||
for current in PathAncestors::new(path) {
|
||||
|
||||
@ -3,9 +3,11 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use super::{AppSettings, DevProcess, ExitReason, Options, RustAppSettings, RustupTarget};
|
||||
use crate::CommandExt;
|
||||
use crate::{
|
||||
error::{Context, ErrorExt},
|
||||
CommandExt, Error,
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use shared_child::SharedChild;
|
||||
use std::{
|
||||
fs,
|
||||
@ -72,8 +74,7 @@ pub fn run_dev<F: Fn(Option<i32>, ExitReason) + Send + Sync + 'static>(
|
||||
dev_cmd.arg("--color");
|
||||
dev_cmd.arg("always");
|
||||
|
||||
// TODO: double check this
|
||||
dev_cmd.stdout(os_pipe::dup_stdout()?);
|
||||
dev_cmd.stdout(os_pipe::dup_stdout().unwrap());
|
||||
dev_cmd.stderr(Stdio::piped());
|
||||
|
||||
dev_cmd.arg("--");
|
||||
@ -86,16 +87,18 @@ pub fn run_dev<F: Fn(Option<i32>, ExitReason) + Send + Sync + 'static>(
|
||||
|
||||
let dev_child = match SharedChild::spawn(&mut dev_cmd) {
|
||||
Ok(c) => Ok(c),
|
||||
Err(e) if e.kind() == ErrorKind::NotFound => Err(anyhow::anyhow!(
|
||||
"`{}` command not found.{}",
|
||||
runner,
|
||||
Err(e) if e.kind() == ErrorKind::NotFound => crate::error::bail!(
|
||||
"`{runner}` command not found.{}",
|
||||
if runner == "cargo" {
|
||||
" Please follow the Tauri setup guide: https://v2.tauri.app/start/prerequisites/"
|
||||
} else {
|
||||
""
|
||||
}
|
||||
)),
|
||||
Err(e) => Err(e.into()),
|
||||
),
|
||||
Err(e) => Err(Error::CommandFailed {
|
||||
command: runner,
|
||||
error: e,
|
||||
}),
|
||||
}?;
|
||||
let dev_child = Arc::new(dev_child);
|
||||
let dev_child_stderr = dev_child.take_stderr().unwrap();
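`run_dev` spawns the runner through `shared_child::SharedChild` so the same child handle can be waited on from a background thread and killed from elsewhere (for example on Ctrl+C). A reduced sketch of that pattern; the command and the exit handling here are placeholders, not the CLI's dev runner:

use shared_child::SharedChild;
use std::process::Command;
use std::sync::Arc;

fn main() -> std::io::Result<()> {
  let mut cmd = Command::new("cargo");
  cmd.args(["--version"]);

  // SharedChild wraps the child process so several threads can wait on or kill it.
  let child = Arc::new(SharedChild::spawn(&mut cmd)?);

  let waiter = child.clone();
  let handle = std::thread::spawn(move || {
    // The dev runner does this on a background thread and reports the exit reason.
    waiter.wait()
  });

  // The main thread keeps a handle too, e.g. to call `child.kill()` on Ctrl+C.
  let status = handle.join().expect("wait thread panicked")?;
  println!("runner exited with {status}");
  Ok(())
}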
@ -164,7 +167,8 @@ pub fn build(
|
||||
}
|
||||
|
||||
if options.target == Some("universal-apple-darwin".into()) {
|
||||
std::fs::create_dir_all(&out_dir).with_context(|| "failed to create project out directory")?;
|
||||
std::fs::create_dir_all(&out_dir)
|
||||
.fs_context("failed to create project out directory", out_dir.clone())?;
|
||||
|
||||
let bin_name = bin_path.file_stem().unwrap();
|
||||
|
||||
@ -189,9 +193,9 @@ pub fn build(
|
||||
|
||||
let lipo_status = lipo_cmd.output_ok()?.status;
|
||||
if !lipo_status.success() {
|
||||
return Err(anyhow::anyhow!(
|
||||
crate::error::bail!(
|
||||
"Result of `lipo` command was unsuccessful: {lipo_status}. (Is `lipo` installed?)"
|
||||
));
|
||||
);
|
||||
}
|
||||
} else {
|
||||
build_production_app(options, available_targets, config_features)
|
||||
@ -210,8 +214,8 @@ fn build_production_app(
|
||||
let runner = build_cmd.get_program().to_string_lossy().into_owned();
|
||||
match build_cmd.piped() {
|
||||
Ok(status) if status.success() => Ok(()),
|
||||
Ok(_) => Err(anyhow::anyhow!("failed to build app")),
|
||||
Err(e) if e.kind() == ErrorKind::NotFound => Err(anyhow::anyhow!(
|
||||
Ok(_) => crate::error::bail!("failed to build app"),
|
||||
Err(e) if e.kind() == ErrorKind::NotFound => crate::error::bail!(
|
||||
"`{}` command not found.{}",
|
||||
runner,
|
||||
if runner == "cargo" {
|
||||
@ -219,8 +223,11 @@ fn build_production_app(
|
||||
} else {
|
||||
""
|
||||
}
|
||||
)),
|
||||
Err(e) => Err(e.into()),
|
||||
),
|
||||
Err(e) => Err(Error::CommandFailed {
|
||||
command: runner,
|
||||
error: e,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
@ -302,7 +309,7 @@ fn validate_target(
|
||||
if let Some(available_targets) = available_targets {
|
||||
if let Some(target) = available_targets.iter().find(|t| t.name == target) {
|
||||
if !target.installed {
|
||||
anyhow::bail!(
|
||||
crate::error::bail!(
|
||||
"Target {target} is not installed (installed targets: {installed}). Please run `rustup target add {target}`.",
|
||||
target = target.name,
|
||||
installed = available_targets.iter().filter(|t| t.installed).map(|t| t.name.as_str()).collect::<Vec<&str>>().join(", ")
|
||||
@ -310,7 +317,7 @@ fn validate_target(
|
||||
}
|
||||
}
|
||||
if !available_targets.iter().any(|t| t.name == target) {
|
||||
anyhow::bail!("Target {target} does not exist. Please run `rustup target list` to see the available targets.", target = target);
|
||||
crate::error::bail!("Target {target} does not exist. Please run `rustup target list` to see the available targets.", target = target);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
@ -328,13 +335,7 @@ fn rename_app(
|
||||
""
|
||||
};
|
||||
let new_path = bin_path.with_file_name(format!("{main_binary_name}{extension}"));
|
||||
fs::rename(&bin_path, &new_path).with_context(|| {
|
||||
format!(
|
||||
"failed to rename `{}` to `{}`",
|
||||
tauri_utils::display_path(bin_path),
|
||||
tauri_utils::display_path(&new_path),
|
||||
)
|
||||
})?;
|
||||
fs::rename(&bin_path, &new_path).fs_context("failed to rename app binary", bin_path)?;
|
||||
Ok(new_path)
|
||||
} else {
|
||||
Ok(bin_path)
|
||||
|
||||
@ -2,18 +2,31 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use crate::Result;
|
||||
use crate::{
|
||||
error::{Error, ErrorExt},
|
||||
Result,
|
||||
};
|
||||
|
||||
use std::{fs::read_dir, path::PathBuf, process::Command};
|
||||
|
||||
pub fn installed_targets() -> Result<Vec<String>> {
|
||||
let output = Command::new("rustc")
|
||||
.args(["--print", "sysroot"])
|
||||
.output()?;
|
||||
.output()
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "rustc --print sysroot".to_string(),
|
||||
error,
|
||||
})?;
|
||||
let sysroot_path = PathBuf::from(String::from_utf8_lossy(&output.stdout).trim().to_string());
|
||||
|
||||
let mut targets = Vec::new();
|
||||
for entry in read_dir(sysroot_path.join("lib").join("rustlib"))?.flatten() {
|
||||
for entry in read_dir(sysroot_path.join("lib").join("rustlib"))
|
||||
.fs_context(
|
||||
"failed to read Rust sysroot",
|
||||
sysroot_path.join("lib").join("rustlib"),
|
||||
)?
|
||||
.flatten()
|
||||
{
|
||||
if entry.file_type().map(|t| t.is_dir()).unwrap_or_default() {
|
||||
let name = entry.file_name();
|
||||
if name != "etc" && name != "src" {
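`installed_targets` discovers rustup-installed targets by listing the directory names under `$(rustc --print sysroot)/lib/rustlib`, skipping the non-target `etc` and `src` entries. The same idea as a standalone program:

use std::fs::read_dir;
use std::path::PathBuf;
use std::process::Command;

fn installed_targets() -> std::io::Result<Vec<String>> {
  // Ask rustc where the active toolchain's sysroot lives.
  let output = Command::new("rustc").args(["--print", "sysroot"]).output()?;
  let sysroot = PathBuf::from(String::from_utf8_lossy(&output.stdout).trim());

  // Every installed target has a directory under <sysroot>/lib/rustlib.
  let mut targets = Vec::new();
  for entry in read_dir(sysroot.join("lib").join("rustlib"))?.flatten() {
    if entry.file_type().map(|t| t.is_dir()).unwrap_or(false) {
      let name = entry.file_name().to_string_lossy().into_owned();
      // `etc` and `src` also live there but are not targets.
      if name != "etc" && name != "src" {
        targets.push(name);
      }
    }
  }
  Ok(targets)
}

fn main() -> std::io::Result<()> {
  for target in installed_targets()? {
    println!("{target}");
  }
  Ok(())
}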
@ -2,19 +2,19 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use crate::helpers::{
|
||||
app_paths::tauri_dir,
|
||||
config::{Config, PatternKind},
|
||||
use crate::{
|
||||
error::{Context, ErrorExt},
|
||||
helpers::{
|
||||
app_paths::tauri_dir,
|
||||
config::{Config, PatternKind},
|
||||
},
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use itertools::Itertools;
|
||||
use toml_edit::{Array, DocumentMut, InlineTable, Item, TableLike, Value};
|
||||
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fs::File,
|
||||
io::Write,
|
||||
path::Path,
|
||||
};
|
||||
|
||||
@ -84,11 +84,11 @@ fn get_enabled_features(list: &HashMap<String, Vec<String>>, feature: &str) -> V
|
||||
|
||||
pub fn read_manifest(manifest_path: &Path) -> crate::Result<(DocumentMut, String)> {
|
||||
let manifest_str = std::fs::read_to_string(manifest_path)
|
||||
.with_context(|| format!("Failed to read `{manifest_path:?}` file"))?;
|
||||
.fs_context("failed to read Cargo.toml", manifest_path.to_path_buf())?;
|
||||
|
||||
let manifest: DocumentMut = manifest_str
|
||||
.parse::<DocumentMut>()
|
||||
.with_context(|| "Failed to parse Cargo.toml")?;
|
||||
.context("failed to parse Cargo.toml")?;
|
||||
|
||||
Ok((manifest, manifest_str))
|
||||
}
|
||||
@ -172,10 +172,10 @@ fn write_features<F: Fn(&str) -> bool>(
|
||||
*dep = Value::InlineTable(def);
|
||||
}
|
||||
_ => {
|
||||
return Err(anyhow::anyhow!(
|
||||
crate::error::bail!(
|
||||
"Unsupported {} dependency format on Cargo.toml",
|
||||
dependency_name
|
||||
))
|
||||
);
|
||||
}
|
||||
}
|
||||
Ok(true)
|
||||
@ -313,10 +313,8 @@ pub fn rewrite_manifest(config: &Config) -> crate::Result<(Manifest, bool)> {
|
||||
let new_manifest_str = serialize_manifest(&manifest);
|
||||
|
||||
if persist && original_manifest_str != new_manifest_str {
|
||||
let mut manifest_file =
|
||||
File::create(&manifest_path).with_context(|| "failed to open Cargo.toml for rewrite")?;
|
||||
manifest_file.write_all(new_manifest_str.as_bytes())?;
|
||||
manifest_file.flush()?;
|
||||
std::fs::write(&manifest_path, new_manifest_str)
|
||||
.fs_context("failed to rewrite Cargo manifest", &manifest_path)?;
|
||||
Ok((
|
||||
Manifest {
|
||||
inner: manifest,
|
||||
|
||||
@ -10,15 +10,13 @@
|
||||
)]
|
||||
#![cfg(any(target_os = "macos", target_os = "linux", windows))]
|
||||
|
||||
use anyhow::Context;
|
||||
pub use anyhow::Result;
|
||||
|
||||
mod acl;
|
||||
mod add;
|
||||
mod build;
|
||||
mod bundle;
|
||||
mod completions;
|
||||
mod dev;
|
||||
mod error;
|
||||
mod helpers;
|
||||
mod icon;
|
||||
mod info;
|
||||
@ -34,6 +32,7 @@ mod signer;
|
||||
use clap::{ArgAction, CommandFactory, FromArgMatches, Parser, Subcommand, ValueEnum};
|
||||
use env_logger::fmt::style::{AnsiColor, Style};
|
||||
use env_logger::Builder;
|
||||
pub use error::{Error, ErrorExt, Result};
|
||||
use log::Level;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::io::{BufReader, Write};
|
||||
@ -48,39 +47,43 @@ use std::{
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use crate::error::Context;
|
||||
|
||||
/// Tauri configuration argument option.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ConfigValue(pub(crate) serde_json::Value);
|
||||
|
||||
impl FromStr for ConfigValue {
|
||||
type Err = anyhow::Error;
|
||||
type Err = Error;
|
||||
|
||||
fn from_str(config: &str) -> std::result::Result<Self, Self::Err> {
|
||||
if config.starts_with('{') {
|
||||
Ok(Self(
|
||||
serde_json::from_str(config).context("invalid configuration JSON")?,
|
||||
))
|
||||
Ok(Self(serde_json::from_str(config).with_context(|| {
|
||||
format!("failed to parse config `{config}` as JSON")
|
||||
})?))
|
||||
} else {
|
||||
let path = PathBuf::from(config);
|
||||
if path.exists() {
|
||||
let raw = &read_to_string(&path)
|
||||
.with_context(|| format!("invalid configuration at file {config}"))?;
|
||||
match path.extension() {
|
||||
Some(ext) if ext == "toml" => Ok(Self(::toml::from_str(raw)?)),
|
||||
Some(ext) if ext == "json5" => Ok(Self(::json5::from_str(raw)?)),
|
||||
// treat all other extensions as json
|
||||
_ => Ok(Self(
|
||||
// from tauri-utils/src/config/parse.rs:
|
||||
// we also want to support **valid** json5 in the .json extension
|
||||
// if the json5 is not valid the serde_json error for regular json will be returned.
|
||||
match ::json5::from_str(raw) {
|
||||
Ok(json5) => json5,
|
||||
Err(_) => serde_json::from_str(raw)?,
|
||||
},
|
||||
)),
|
||||
}
|
||||
} else {
|
||||
anyhow::bail!("provided configuration path does not exist")
|
||||
let raw =
|
||||
read_to_string(&path).fs_context("failed to read configuration file", path.clone())?;
|
||||
|
||||
match path.extension().and_then(|ext| ext.to_str()) {
|
||||
Some("toml") => Ok(Self(::toml::from_str(&raw).with_context(|| {
|
||||
format!("failed to parse config at {} as TOML", path.display())
|
||||
})?)),
|
||||
Some("json5") => Ok(Self(::json5::from_str(&raw).with_context(|| {
|
||||
format!("failed to parse config at {} as JSON5", path.display())
|
||||
})?)),
|
||||
// treat all other extensions as json
|
||||
_ => Ok(Self(
|
||||
// from tauri-utils/src/config/parse.rs:
|
||||
// we also want to support **valid** json5 in the .json extension
|
||||
// if the json5 is not valid the serde_json error for regular json will be returned.
|
||||
match ::json5::from_str(&raw) {
|
||||
Ok(json5) => json5,
|
||||
Err(_) => serde_json::from_str(&raw)
|
||||
.with_context(|| format!("failed to parse config at {} as JSON", path.display()))?,
|
||||
},
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
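The rewritten `FromStr` accepts either inline JSON or a file path whose extension picks the parser: TOML, JSON5, or JSON with a JSON5 fallback. A compact sketch of that dispatch outside the CLI types (the function name is hypothetical; `toml`, `json5` and `serde_json` are the crates already used above):

use std::path::Path;

/// Hypothetical helper: parse configuration text into a JSON value based on the file extension.
fn parse_config(path: &Path, raw: &str) -> Result<serde_json::Value, String> {
  match path.extension().and_then(|ext| ext.to_str()) {
    Some("toml") => toml::from_str(raw).map_err(|e| format!("invalid TOML: {e}")),
    Some("json5") => json5::from_str(raw).map_err(|e| format!("invalid JSON5: {e}")),
    // Any other extension is treated as JSON, also accepting valid JSON5.
    _ => json5::from_str(raw)
      .or_else(|_| serde_json::from_str(raw).map_err(|e| format!("invalid JSON: {e}"))),
  }
}

fn main() {
  let value = parse_config(Path::new("tauri.conf.json"), r#"{ "identifier": "com.example.app" }"#)
    .expect("failed to parse config");
  assert_eq!(value["identifier"], "com.example.app");
}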
@ -172,6 +175,13 @@ fn format_error<I: CommandFactory>(err: clap::Error) -> clap::Error {
|
||||
err.format(&mut app)
|
||||
}
|
||||
|
||||
fn get_verbosity(cli_verbose: u8) -> u8 {
|
||||
std::env::var("TAURI_CLI_VERBOSITY")
|
||||
.ok()
|
||||
.and_then(|v| v.parse().ok())
|
||||
.unwrap_or(cli_verbose)
|
||||
}
|
||||
|
||||
/// Run the Tauri CLI with the passed arguments, exiting if an error occurs.
|
||||
///
|
||||
/// The passed arguments should have the binary argument(s) stripped out before being passed.
|
||||
@ -190,19 +200,7 @@ where
|
||||
A: Into<OsString> + Clone,
|
||||
{
|
||||
if let Err(e) = try_run(args, bin_name) {
|
||||
let mut message = e.to_string();
|
||||
if e.chain().count() > 1 {
|
||||
message.push(':');
|
||||
}
|
||||
e.chain().skip(1).for_each(|cause| {
|
||||
let m = cause.to_string();
|
||||
if !message.contains(&m) {
|
||||
message.push('\n');
|
||||
message.push_str(" - ");
|
||||
message.push_str(&m);
|
||||
}
|
||||
});
|
||||
log::error!("{message}");
|
||||
log::error!("{e}");
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
@ -227,16 +225,12 @@ where
|
||||
Ok(s) => s,
|
||||
Err(e) => e.exit(),
|
||||
};
|
||||
|
||||
let verbosity_number = std::env::var("TAURI_CLI_VERBOSITY")
|
||||
.ok()
|
||||
.and_then(|v| v.parse().ok())
|
||||
.unwrap_or(cli.verbose);
|
||||
// set the verbosity level so subsequent CLI calls (xcode-script, android-studio-script) refer to it
|
||||
let verbosity_number = get_verbosity(cli.verbose);
|
||||
std::env::set_var("TAURI_CLI_VERBOSITY", verbosity_number.to_string());
|
||||
|
||||
let mut builder = Builder::from_default_env();
|
||||
let init_res = builder
|
||||
if let Err(err) = builder
|
||||
.format_indent(Some(12))
|
||||
.filter(None, verbosity_level(verbosity_number).to_level_filter())
|
||||
// goblin spams an insane amount of really technical logs on the debug level so we're reducing one level
|
||||
@ -256,7 +250,6 @@ where
|
||||
is_command_output = action == "stdout" || action == "stderr";
|
||||
if !is_command_output {
|
||||
let style = Style::new().fg_color(Some(AnsiColor::Green.into())).bold();
|
||||
|
||||
write!(f, "{style}{action:>12}{style:#} ")?;
|
||||
}
|
||||
} else {
|
||||
@ -270,15 +263,13 @@ where
|
||||
|
||||
if !is_command_output && log::log_enabled!(Level::Debug) {
|
||||
let style = Style::new().fg_color(Some(AnsiColor::Black.into()));
|
||||
|
||||
write!(f, "[{style}{}{style:#}] ", record.target())?;
|
||||
}
|
||||
|
||||
writeln!(f, "{}", record.args())
|
||||
})
|
||||
.try_init();
|
||||
|
||||
if let Err(err) = init_res {
|
||||
.try_init()
|
||||
{
|
||||
eprintln!("Failed to attach logger: {err}");
|
||||
}
|
||||
|
||||
@ -311,7 +302,7 @@ fn verbosity_level(num: u8) -> Level {
|
||||
match num {
|
||||
0 => Level::Info,
|
||||
1 => Level::Debug,
|
||||
2.. => Level::Trace,
|
||||
_ => Level::Trace,
|
||||
}
|
||||
}
|
||||
|
||||
@ -338,36 +329,51 @@ impl CommandExt for Command {
|
||||
self.stdin(os_pipe::dup_stdin()?);
|
||||
self.stdout(os_pipe::dup_stdout()?);
|
||||
self.stderr(os_pipe::dup_stderr()?);
|
||||
let program = self.get_program().to_string_lossy().into_owned();
|
||||
log::debug!(action = "Running"; "Command `{} {}`", program, self.get_args().map(|arg| arg.to_string_lossy()).fold(String::new(), |acc, arg| format!("{acc} {arg}")));
|
||||
|
||||
let program = self.get_program().to_string_lossy().into_owned();
|
||||
let args = self
|
||||
.get_args()
|
||||
.map(|a| a.to_string_lossy())
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ");
|
||||
|
||||
log::debug!(action = "Running"; "Command `{program} {args}`");
|
||||
self.status()
|
||||
}
|
||||
|
||||
fn output_ok(&mut self) -> crate::Result<Output> {
|
||||
let program = self.get_program().to_string_lossy().into_owned();
|
||||
log::debug!(action = "Running"; "Command `{} {}`", program, self.get_args().map(|arg| arg.to_string_lossy()).fold(String::new(), |acc, arg| format!("{acc} {arg}")));
|
||||
let args = self
|
||||
.get_args()
|
||||
.map(|a| a.to_string_lossy())
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ");
|
||||
let cmdline = format!("{program} {args}");
|
||||
log::debug!(action = "Running"; "Command `{cmdline}`");
|
||||
|
||||
self.stdout(Stdio::piped());
|
||||
self.stderr(Stdio::piped());
|
||||
|
||||
let mut child = self.spawn()?;
|
||||
let mut child = self
|
||||
.spawn()
|
||||
.with_context(|| format!("failed to run command `{cmdline}`"))?;
|
||||
|
||||
let mut stdout = child.stdout.take().map(BufReader::new).unwrap();
|
||||
let stdout_lines = Arc::new(Mutex::new(Vec::new()));
|
||||
let stdout_lines_ = stdout_lines.clone();
|
||||
std::thread::spawn(move || {
|
||||
let mut line = String::new();
|
||||
let mut lines = stdout_lines_.lock().unwrap();
|
||||
loop {
|
||||
line.clear();
|
||||
match stdout.read_line(&mut line) {
|
||||
Ok(0) => break,
|
||||
Ok(_) => {
|
||||
log::debug!(action = "stdout"; "{}", line.trim_end());
|
||||
lines.extend(line.as_bytes().to_vec());
|
||||
if let Ok(mut lines) = stdout_lines_.lock() {
|
||||
loop {
|
||||
line.clear();
|
||||
match stdout.read_line(&mut line) {
|
||||
Ok(0) => break,
|
||||
Ok(_) => {
|
||||
log::debug!(action = "stdout"; "{}", line.trim_end());
|
||||
lines.extend(line.as_bytes());
|
||||
}
|
||||
Err(_) => (),
|
||||
}
|
||||
Err(_) => (),
|
||||
}
|
||||
}
|
||||
});
|
||||
@ -377,21 +383,24 @@ impl CommandExt for Command {
|
||||
let stderr_lines_ = stderr_lines.clone();
|
||||
std::thread::spawn(move || {
|
||||
let mut line = String::new();
|
||||
let mut lines = stderr_lines_.lock().unwrap();
|
||||
loop {
|
||||
line.clear();
|
||||
match stderr.read_line(&mut line) {
|
||||
Ok(0) => break,
|
||||
Ok(_) => {
|
||||
log::debug!(action = "stderr"; "{}", line.trim_end());
|
||||
lines.extend(line.as_bytes().to_vec());
|
||||
if let Ok(mut lines) = stderr_lines_.lock() {
|
||||
loop {
|
||||
line.clear();
|
||||
match stderr.read_line(&mut line) {
|
||||
Ok(0) => break,
|
||||
Ok(_) => {
|
||||
log::debug!(action = "stderr"; "{}", line.trim_end());
|
||||
lines.extend(line.as_bytes());
|
||||
}
|
||||
Err(_) => (),
|
||||
}
|
||||
Err(_) => (),
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let status = child.wait()?;
|
||||
let status = child
|
||||
.wait()
|
||||
.with_context(|| format!("failed to run command `{cmdline}`"))?;
|
||||
|
||||
let output = Output {
|
||||
status,
|
||||
@ -402,7 +411,10 @@ impl CommandExt for Command {
|
||||
if output.status.success() {
|
||||
Ok(output)
|
||||
} else {
|
||||
Err(anyhow::anyhow!("failed to run {}", program))
|
||||
crate::error::bail!(
|
||||
"failed to run command `{cmdline}`: command exited with status code {}",
|
||||
output.status.code().unwrap_or(-1)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
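`output_ok` now streams the child's stdout and stderr through background threads so each line can be logged as it arrives while still being collected for the returned `Output`. A minimal sketch of one such reader thread, without the CLI's logging macros:

use std::io::{BufRead, BufReader};
use std::process::{Command, Stdio};
use std::sync::{Arc, Mutex};

fn main() -> std::io::Result<()> {
  let mut child = Command::new("cargo")
    .args(["--version"])
    .stdout(Stdio::piped())
    .spawn()?;

  let stdout = BufReader::new(child.stdout.take().expect("stdout was piped"));
  let collected = Arc::new(Mutex::new(Vec::<u8>::new()));

  let collected_ = collected.clone();
  let reader = std::thread::spawn(move || {
    // Read line by line so each line can be logged as soon as it is produced.
    for line in stdout.lines().map_while(Result::ok) {
      eprintln!("stdout: {line}");
      if let Ok(mut bytes) = collected_.lock() {
        bytes.extend_from_slice(line.as_bytes());
        bytes.push(b'\n');
      }
    }
  });

  let status = child.wait()?;
  reader.join().expect("reader thread panicked");

  let bytes = collected.lock().expect("collector poisoned");
  println!("exit: {status}, captured {} bytes", bytes.len());
  Ok(())
}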
@ -417,4 +429,10 @@ mod tests {
|
||||
fn verify_cli() {
|
||||
Cli::command().debug_assert();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn help_output_includes_build() {
|
||||
let help = Cli::command().render_help().to_string();
|
||||
assert!(help.contains("Build"));
|
||||
}
|
||||
}
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use crate::Result;
|
||||
use crate::{error::Context, ErrorExt, Result};
|
||||
|
||||
use serde_json::{Map, Value};
|
||||
use tauri_utils::acl::{
|
||||
@ -22,9 +22,17 @@ pub fn migrate(tauri_dir: &Path) -> Result<MigratedConfig> {
|
||||
{
|
||||
let migrated = migrate_config(&mut config)?;
|
||||
if config_path.extension().is_some_and(|ext| ext == "toml") {
|
||||
fs::write(&config_path, toml::to_string_pretty(&config)?)?;
|
||||
fs::write(
|
||||
&config_path,
|
||||
toml::to_string_pretty(&config).context("failed to serialize config")?,
|
||||
)
|
||||
.fs_context("failed to write config", config_path.clone())?;
|
||||
} else {
|
||||
fs::write(&config_path, serde_json::to_string_pretty(&config)?)?;
|
||||
fs::write(
|
||||
&config_path,
|
||||
serde_json::to_string_pretty(&config).context("failed to serialize config")?,
|
||||
)
|
||||
.fs_context("failed to write config", config_path.clone())?;
|
||||
}
|
||||
|
||||
let mut permissions: Vec<PermissionEntry> = vec!["core:default"]
|
||||
@ -34,7 +42,10 @@ pub fn migrate(tauri_dir: &Path) -> Result<MigratedConfig> {
|
||||
permissions.extend(migrated.permissions.clone());
|
||||
|
||||
let capabilities_path = config_path.parent().unwrap().join("capabilities");
|
||||
fs::create_dir_all(&capabilities_path)?;
|
||||
fs::create_dir_all(&capabilities_path).fs_context(
|
||||
"failed to create capabilities directory",
|
||||
capabilities_path.clone(),
|
||||
)?;
|
||||
fs::write(
|
||||
capabilities_path.join("migrated.json"),
|
||||
serde_json::to_string_pretty(&Capability {
|
||||
@ -46,7 +57,12 @@ pub fn migrate(tauri_dir: &Path) -> Result<MigratedConfig> {
|
||||
webviews: vec![],
|
||||
permissions,
|
||||
platforms: None,
|
||||
})?,
|
||||
})
|
||||
.context("failed to serialize capabilities")?,
|
||||
)
|
||||
.fs_context(
|
||||
"failed to write capabilities",
|
||||
capabilities_path.join("migrated.json"),
|
||||
)?;
|
||||
|
||||
return Ok(migrated);
|
||||
@ -88,7 +104,7 @@ fn migrate_config(config: &mut Value) -> Result<MigratedConfig> {
|
||||
}
|
||||
|
||||
// dangerousUseHttpScheme/useHttpsScheme
|
||||
let dangerouse_use_http = tauri_config
|
||||
let dangerous_use_http = tauri_config
|
||||
.get("security")
|
||||
.and_then(|w| w.as_object())
|
||||
.and_then(|w| {
|
||||
@ -104,7 +120,7 @@ fn migrate_config(config: &mut Value) -> Result<MigratedConfig> {
|
||||
{
|
||||
for window in windows {
|
||||
if let Some(window) = window.as_object_mut() {
|
||||
window.insert("useHttpsScheme".to_string(), (!dangerouse_use_http).into());
|
||||
window.insert("useHttpsScheme".to_string(), (!dangerous_use_http).into());
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -375,7 +391,8 @@ fn process_security(security: &mut Map<String, Value>) -> Result<()> {
|
||||
let csp = if csp_value.is_null() {
|
||||
csp_value
|
||||
} else {
|
||||
let mut csp: tauri_utils::config_v1::Csp = serde_json::from_value(csp_value)?;
|
||||
let mut csp: tauri_utils::config_v1::Csp =
|
||||
serde_json::from_value(csp_value).context("failed to deserialize CSP")?;
|
||||
match &mut csp {
|
||||
tauri_utils::config_v1::Csp::Policy(csp) => {
|
||||
if csp.contains("connect-src") {
|
||||
@ -399,7 +416,7 @@ fn process_security(security: &mut Map<String, Value>) -> Result<()> {
|
||||
}
|
||||
}
|
||||
}
|
||||
serde_json::to_value(csp)?
|
||||
serde_json::to_value(csp).context("failed to serialize CSP")?
|
||||
};
|
||||
|
||||
security.insert("csp".into(), csp);
|
||||
@ -423,7 +440,8 @@ fn process_allowlist(
|
||||
tauri_config: &mut Map<String, Value>,
|
||||
allowlist: Value,
|
||||
) -> Result<tauri_utils::config_v1::AllowlistConfig> {
|
||||
let allowlist: tauri_utils::config_v1::AllowlistConfig = serde_json::from_value(allowlist)?;
|
||||
let allowlist: tauri_utils::config_v1::AllowlistConfig =
|
||||
serde_json::from_value(allowlist).context("failed to deserialize allowlist")?;
|
||||
|
||||
if allowlist.protocol.asset_scope != Default::default() {
|
||||
let security = tauri_config
|
||||
@ -435,7 +453,8 @@ fn process_allowlist(
|
||||
let mut asset_protocol = Map::new();
|
||||
asset_protocol.insert(
|
||||
"scope".into(),
|
||||
serde_json::to_value(allowlist.protocol.asset_scope.clone())?,
|
||||
serde_json::to_value(allowlist.protocol.asset_scope.clone())
|
||||
.context("failed to serialize asset scope")?,
|
||||
);
|
||||
if allowlist.protocol.asset {
|
||||
asset_protocol.insert("enable".into(), true.into());
|
||||
@ -639,7 +658,10 @@ fn allowlist_to_permissions(
|
||||
|
||||
fn process_cli(plugins: &mut Map<String, Value>, cli: Value) -> Result<()> {
|
||||
if let Some(cli) = cli.as_object() {
|
||||
plugins.insert("cli".into(), serde_json::to_value(cli)?);
|
||||
plugins.insert(
|
||||
"cli".into(),
|
||||
serde_json::to_value(cli).context("failed to serialize CLI")?,
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@ -663,7 +685,10 @@ fn process_updater(
|
||||
.unwrap_or_default()
|
||||
|| updater.get("pubkey").is_some()
|
||||
{
|
||||
plugins.insert("updater".into(), serde_json::to_value(updater)?);
|
||||
plugins.insert(
|
||||
"updater".into(),
|
||||
serde_json::to_value(updater).context("failed to serialize updater")?,
|
||||
);
|
||||
migrated.plugins.insert("updater".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
@ -3,10 +3,10 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use crate::{
|
||||
error::Context,
|
||||
helpers::{app_paths::walk_builder, npm::PackageManager},
|
||||
Result,
|
||||
Error, ErrorExt, Result,
|
||||
};
|
||||
use anyhow::Context;
|
||||
use itertools::Itertools;
|
||||
use magic_string::MagicString;
|
||||
use oxc_allocator::Allocator;
|
||||
@ -101,7 +101,8 @@ pub fn migrate(frontend_dir: &Path) -> Result<Vec<String>> {
|
||||
let path = entry.path();
|
||||
let ext = path.extension().unwrap_or_default();
|
||||
if JS_EXTENSIONS.iter().any(|e| e == &ext) {
|
||||
let js_contents = std::fs::read_to_string(path)?;
|
||||
let js_contents =
|
||||
std::fs::read_to_string(path).fs_context("failed to read JS file", path.to_path_buf())?;
|
||||
let new_contents = migrate_imports(
|
||||
path,
|
||||
&js_contents,
|
||||
@ -110,7 +111,7 @@ pub fn migrate(frontend_dir: &Path) -> Result<Vec<String>> {
|
||||
)?;
|
||||
if new_contents != js_contents {
|
||||
fs::write(path, new_contents)
|
||||
.with_context(|| format!("Error writing {}", path.display()))?;
|
||||
.fs_context("failed to write JS file", path.to_path_buf())?;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -166,7 +167,7 @@ fn migrate_imports<'a>(
|
||||
let allocator = Allocator::default();
|
||||
let ret = Parser::new(&allocator, js_source, source_type).parse();
|
||||
if !ret.errors.is_empty() {
|
||||
anyhow::bail!(
|
||||
crate::error::bail!(
|
||||
"failed to parse {} as valid Javascript/Typescript file",
|
||||
path.display()
|
||||
)
|
||||
@ -193,8 +194,12 @@ fn migrate_imports<'a>(
|
||||
new_module,
|
||||
Default::default(),
|
||||
)
|
||||
.map_err(|e| anyhow::anyhow!("{e}"))
|
||||
.context("failed to replace import source")?;
|
||||
.map_err(|e| {
|
||||
Error::Context(
|
||||
"failed to replace import source".to_string(),
|
||||
e.to_string().into(),
|
||||
)
|
||||
})?;
|
||||
|
||||
// if module was pluginified, add to packages
|
||||
if let Some(plugin_name) = new_module.strip_prefix("@tauri-apps/plugin-") {
|
||||
@ -279,8 +284,12 @@ fn migrate_imports<'a>(
|
||||
new_identifier,
|
||||
Default::default(),
|
||||
)
|
||||
.map_err(|e| anyhow::anyhow!("{e}"))
|
||||
.context("failed to rename identifier")?;
|
||||
.map_err(|e| {
|
||||
Error::Context(
|
||||
"failed to rename identifier".to_string(),
|
||||
e.to_string().into(),
|
||||
)
|
||||
})?;
|
||||
} else {
|
||||
// if None, we need to remove this specifier,
|
||||
// it will also be replaced with an import from its new plugin below
|
||||
@ -297,8 +306,12 @@ fn migrate_imports<'a>(
|
||||
|
||||
magic_js_source
|
||||
.remove(script_start + start as i64, script_start + end as i64)
|
||||
.map_err(|e| anyhow::anyhow!("{e}"))
|
||||
.context("failed to remove identifier")?;
|
||||
.map_err(|e| {
|
||||
Error::Context(
|
||||
"failed to remove identifier".to_string(),
|
||||
e.to_string().into(),
|
||||
)
|
||||
})?;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -322,8 +335,7 @@ fn migrate_imports<'a>(
|
||||
for import in imports_to_add {
|
||||
magic_js_source
|
||||
.append_right(script_start as u32 + start, &import)
|
||||
.map_err(|e| anyhow::anyhow!("{e}"))
|
||||
.context("failed to add import")?;
|
||||
.map_err(|e| Error::Context("failed to add import".to_string(), e.to_string().into()))?;
|
||||
}
|
||||
}
|
||||
|
||||
@ -331,8 +343,9 @@ fn migrate_imports<'a>(
|
||||
for stmt in stmts_to_add {
|
||||
magic_js_source
|
||||
.append_right(script_start as u32 + start, stmt)
|
||||
.map_err(|e| anyhow::anyhow!("{e}"))
|
||||
.context("failed to add statement")?;
|
||||
.map_err(|e| {
|
||||
Error::Context("failed to add statement".to_string(), e.to_string().into())
|
||||
})?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -3,11 +3,11 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use crate::{
|
||||
error::ErrorExt,
|
||||
interface::rust::manifest::{read_manifest, serialize_manifest},
|
||||
Result,
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use tauri_utils::config_v1::Allowlist;
|
||||
use toml_edit::{DocumentMut, Entry, Item, TableLike, Value};
|
||||
|
||||
@ -21,7 +21,7 @@ pub fn migrate(tauri_dir: &Path) -> Result<()> {
|
||||
migrate_manifest(&mut manifest)?;
|
||||
|
||||
std::fs::write(&manifest_path, serialize_manifest(&manifest))
|
||||
.context("failed to rewrite Cargo manifest")?;
|
||||
.fs_context("failed to rewrite Cargo manifest", &manifest_path)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -3,12 +3,11 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use crate::{
|
||||
error::Context,
|
||||
helpers::app_paths::{frontend_dir, tauri_dir},
|
||||
Result,
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
|
||||
mod config;
|
||||
mod frontend;
|
||||
mod manifest;
|
||||
|
||||
@ -3,6 +3,7 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use crate::{
|
||||
error::{Context, ErrorExt},
|
||||
helpers::{
|
||||
app_paths::{frontend_dir, tauri_dir},
|
||||
npm::PackageManager,
|
||||
@ -13,7 +14,6 @@ use crate::{
|
||||
|
||||
use std::{fs::read_to_string, path::Path};
|
||||
|
||||
use anyhow::Context;
|
||||
use toml_edit::{DocumentMut, Item, Table, TableLike, Value};
|
||||
|
||||
pub fn run() -> Result<()> {
|
||||
@ -29,7 +29,7 @@ pub fn run() -> Result<()> {
|
||||
migrate_npm_dependencies(frontend_dir)?;
|
||||
|
||||
std::fs::write(&manifest_path, serialize_manifest(&manifest))
|
||||
.context("failed to rewrite Cargo manifest")?;
|
||||
.fs_context("failed to rewrite Cargo manifest", &manifest_path)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@ -97,14 +97,19 @@ fn migrate_permissions(tauri_dir: &Path) -> Result<()> {
|
||||
];
|
||||
|
||||
for entry in walkdir::WalkDir::new(tauri_dir.join("capabilities")) {
|
||||
let entry = entry?;
|
||||
let entry = entry.map_err(std::io::Error::other).fs_context(
|
||||
"failed to walk capabilities directory",
|
||||
tauri_dir.join("capabilities"),
|
||||
)?;
|
||||
let path = entry.path();
|
||||
if path.extension().is_some_and(|ext| ext == "json") {
|
||||
let mut capability = read_to_string(path).context("failed to read capability")?;
|
||||
let mut capability =
|
||||
read_to_string(path).fs_context("failed to read capability", path.to_path_buf())?;
|
||||
for plugin in core_plugins {
|
||||
capability = capability.replace(&format!("\"{plugin}:"), &format!("\"core:{plugin}:"));
|
||||
}
|
||||
std::fs::write(path, capability).context("failed to rewrite capability")?;
|
||||
std::fs::write(path, capability)
|
||||
.fs_context("failed to rewrite capability", path.to_path_buf())?;
|
||||
}
|
||||
}
|
||||
Ok(())
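The migration walks `src-tauri/capabilities` and rewrites v1-era permission identifiers such as `"window:allow-close"` to the namespaced `"core:window:allow-close"` form. A trimmed-down sketch of that loop with the `walkdir` crate; the plugin list is shortened for illustration:

use std::fs;
use std::path::Path;

fn migrate_capabilities(capabilities_dir: &Path) -> std::io::Result<()> {
  // Only a few of the migrated core plugins, for illustration.
  let core_plugins = ["app", "event", "window", "webview", "path"];

  for entry in walkdir::WalkDir::new(capabilities_dir)
    .into_iter()
    .filter_map(Result::ok)
  {
    let path = entry.path();
    if path.extension().is_some_and(|ext| ext == "json") {
      let mut capability = fs::read_to_string(path)?;
      for plugin in core_plugins {
        // `"window:allow-close"` becomes `"core:window:allow-close"`, and so on.
        capability = capability.replace(&format!("\"{plugin}:"), &format!("\"core:{plugin}:"));
      }
      fs::write(path, capability)?;
    }
  }
  Ok(())
}

fn main() -> std::io::Result<()> {
  migrate_capabilities(Path::new("src-tauri/capabilities"))
}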
|
||||
|
||||
@ -3,6 +3,7 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use crate::{
|
||||
error::{bail, Context, ErrorExt},
|
||||
helpers::{
|
||||
app_paths::tauri_dir,
|
||||
cargo_manifest::{crate_version, CargoLock, CargoManifest},
|
||||
@ -13,8 +14,6 @@ use crate::{
|
||||
|
||||
use std::{fs::read_to_string, str::FromStr};
|
||||
|
||||
use anyhow::{bail, Context};
|
||||
|
||||
mod migrations;
|
||||
|
||||
pub fn command() -> Result<()> {
|
||||
@ -22,17 +21,24 @@ pub fn command() -> Result<()> {
|
||||
|
||||
let tauri_dir = tauri_dir();
|
||||
|
||||
let manifest_contents =
|
||||
read_to_string(tauri_dir.join("Cargo.toml")).context("failed to read Cargo manifest")?;
|
||||
let manifest = toml::from_str::<CargoManifest>(&manifest_contents)
|
||||
.context("failed to parse Cargo manifest")?;
|
||||
let manifest_contents = read_to_string(tauri_dir.join("Cargo.toml")).fs_context(
|
||||
"failed to read Cargo manifest",
|
||||
tauri_dir.join("Cargo.toml"),
|
||||
)?;
|
||||
let manifest = toml::from_str::<CargoManifest>(&manifest_contents).with_context(|| {
|
||||
format!(
|
||||
"failed to parse Cargo manifest {}",
|
||||
tauri_dir.join("Cargo.toml").display()
|
||||
)
|
||||
})?;
|
||||
|
||||
let workspace_dir = get_workspace_dir()?;
|
||||
let lock_path = workspace_dir.join("Cargo.lock");
|
||||
let lock = if lock_path.exists() {
|
||||
let lockfile_contents = read_to_string(lock_path).context("failed to read Cargo lockfile")?;
|
||||
let lock =
|
||||
toml::from_str::<CargoLock>(&lockfile_contents).context("failed to parse Cargo lockfile")?;
|
||||
let lockfile_contents =
|
||||
read_to_string(&lock_path).fs_context("failed to read Cargo lockfile", &lock_path)?;
|
||||
let lock = toml::from_str::<CargoLock>(&lockfile_contents)
|
||||
.with_context(|| format!("failed to parse Cargo lockfile {}", lock_path.display()))?;
|
||||
Some(lock)
|
||||
} else {
|
||||
None
|
||||
@ -41,7 +47,8 @@ pub fn command() -> Result<()> {
|
||||
let tauri_version = crate_version(tauri_dir, Some(&manifest), lock.as_ref(), "tauri")
|
||||
.version
|
||||
.context("failed to get tauri version")?;
|
||||
let tauri_version = semver::Version::from_str(&tauri_version)?;
|
||||
let tauri_version = semver::Version::from_str(&tauri_version)
|
||||
.with_context(|| format!("failed to parse tauri version {tauri_version}"))?;
|
||||
|
||||
if tauri_version.major == 1 {
|
||||
migrations::v1::run().context("failed to migrate from v1")?;
|
||||
|
||||
@ -4,14 +4,14 @@
|
||||
|
||||
use super::{detect_target_ok, ensure_init, env, get_app, get_config, read_options, MobileTarget};
|
||||
use crate::{
|
||||
error::{Context, ErrorExt},
|
||||
helpers::config::{get as get_tauri_config, reload as reload_tauri_config},
|
||||
interface::{AppInterface, Interface},
|
||||
mobile::CliOptions,
|
||||
Result,
|
||||
Error, Result,
|
||||
};
|
||||
use clap::{ArgAction, Parser};
|
||||
|
||||
use anyhow::Context;
|
||||
use cargo_mobile2::{
|
||||
android::{adb, target::Target},
|
||||
opts::Profile,
|
||||
@ -91,6 +91,7 @@ pub fn command(options: Options) -> Result<()> {
|
||||
config.app(),
|
||||
config.project_dir(),
|
||||
MobileTarget::Android,
|
||||
std::env::var("CI").is_ok(),
|
||||
)?;
|
||||
|
||||
if !cli_options.config.is_empty() {
|
||||
@ -103,7 +104,7 @@ pub fn command(options: Options) -> Result<()> {
|
||||
)?;
|
||||
}
|
||||
|
||||
let env = env()?;
|
||||
let env = env(std::env::var("CI").is_ok())?;
|
||||
|
||||
if cli_options.dev {
|
||||
let dev_url = tauri_config
|
||||
@ -144,17 +145,23 @@ pub fn command(options: Options) -> Result<()> {
|
||||
log::info!("Installing target {}", target.triple());
|
||||
target
|
||||
.install()
|
||||
.context("failed to install target with rustup")?;
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "rustup target add".to_string(),
|
||||
error,
|
||||
})
|
||||
.context("failed to install target")?;
|
||||
}
|
||||
|
||||
target.build(
|
||||
&config,
|
||||
&metadata,
|
||||
&env,
|
||||
cli_options.noise_level,
|
||||
true,
|
||||
profile,
|
||||
)?;
|
||||
target
|
||||
.build(
|
||||
&config,
|
||||
&metadata,
|
||||
&env,
|
||||
cli_options.noise_level,
|
||||
true,
|
||||
profile,
|
||||
)
|
||||
.context("failed to build Android app")?;
|
||||
|
||||
if !validated_lib {
|
||||
validated_lib = true;
|
||||
@ -164,17 +171,17 @@ pub fn command(options: Options) -> Result<()> {
|
||||
.target_dir(target.triple, profile)
|
||||
.join(config.so_name());
|
||||
|
||||
validate_lib(&lib_path)?;
|
||||
validate_lib(&lib_path).context("failed to validate library")?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
.map_err(|e| anyhow::anyhow!(e.to_string()))?
|
||||
.map_err(|e| Error::GenericError(e.to_string()))?
|
||||
}
|
||||
|
||||
fn validate_lib(path: &Path) -> Result<()> {
|
||||
let so_bytes = std::fs::read(path)?;
|
||||
let so_bytes = std::fs::read(path).fs_context("failed to read library", path.to_path_buf())?;
|
||||
let elf = elf::ElfBytes::<elf::endian::AnyEndian>::minimal_parse(&so_bytes)
|
||||
.context("failed to parse ELF")?;
|
||||
let (symbol_table, string_table) = elf
|
||||
@ -190,7 +197,7 @@ fn validate_lib(path: &Path) -> Result<()> {
|
||||
}
|
||||
|
||||
if !symbols.contains(&"Java_app_tauri_plugin_PluginManager_handlePluginResponse") {
|
||||
anyhow::bail!(
|
||||
crate::error::bail!(
|
||||
"Library from {} does not include required runtime symbols. This means you are likely missing the tauri::mobile_entry_point macro usage, see the documentation for more information: https://v2.tauri.app/start/migrate/from-tauri-1",
|
||||
path.display()
|
||||
);
|
||||
@ -237,7 +244,7 @@ fn adb_forward_port(
|
||||
let device = devices.first().unwrap();
|
||||
Some((device.serial_no().to_string(), device.name().to_string()))
|
||||
} else if devices.len() > 1 {
|
||||
anyhow::bail!("Multiple Android devices are connected ({}), please disconnect devices you do not intend to use so Tauri can determine which to use",
|
||||
crate::error::bail!("Multiple Android devices are connected ({}), please disconnect devices you do not intend to use so Tauri can determine which to use",
|
||||
devices.iter().map(|d| d.name()).collect::<Vec<_>>().join(", "));
|
||||
} else {
|
||||
// when building the app without running to a device, we might have an empty devices list
|
||||
@ -249,7 +256,11 @@ fn adb_forward_port(
|
||||
|
||||
// clear port forwarding for all devices
|
||||
for device in &devices {
|
||||
let reverse_list_output = adb_reverse_list(env, device.serial_no())?;
|
||||
let reverse_list_output =
|
||||
adb_reverse_list(env, device.serial_no()).map_err(|error| Error::CommandFailed {
|
||||
command: "adb reverse --list".to_string(),
|
||||
error,
|
||||
})?;
|
||||
|
||||
// check if the device has the port forwarded
|
||||
if String::from_utf8_lossy(&reverse_list_output.stdout).contains(&forward) {
|
||||
@ -271,11 +282,20 @@ fn adb_forward_port(
|
||||
log::info!("{forward} already forwarded to {target_device_name}");
|
||||
} else {
|
||||
loop {
|
||||
run_adb_reverse(env, &target_device_serial_no, &forward, &forward).with_context(|| {
|
||||
format!("failed to forward port with adb, is the {target_device_name} device connected?",)
|
||||
run_adb_reverse(env, &target_device_serial_no, &forward, &forward).map_err(|error| {
|
||||
Error::CommandFailed {
|
||||
command: format!("adb reverse {forward} {forward}"),
|
||||
error,
|
||||
}
|
||||
})?;
|
||||
|
||||
let reverse_list_output = adb_reverse_list(env, &target_device_serial_no)?;
|
||||
let reverse_list_output =
|
||||
adb_reverse_list(env, &target_device_serial_no).map_err(|error| {
|
||||
Error::CommandFailed {
|
||||
command: "adb reverse --list".to_string(),
|
||||
error,
|
||||
}
|
||||
})?;
|
||||
// wait and retry until the port has actually been forwarded
|
||||
if String::from_utf8_lossy(&reverse_list_output.stdout).contains(&forward) {
|
||||
break;
|
||||
|
||||
@ -8,18 +8,18 @@ use super::{
|
||||
};
|
||||
use crate::{
|
||||
build::Options as BuildOptions,
|
||||
error::Context,
|
||||
helpers::{
|
||||
app_paths::tauri_dir,
|
||||
config::{get as get_tauri_config, ConfigHandle},
|
||||
flock,
|
||||
},
|
||||
interface::{AppInterface, Interface, Options as InterfaceOptions},
|
||||
mobile::{write_options, CliOptions},
|
||||
ConfigValue, Result,
|
||||
mobile::{android::generate_tauri_properties, write_options, CliOptions, TargetDevice},
|
||||
ConfigValue, Error, Result,
|
||||
};
|
||||
use clap::{ArgAction, Parser};
|
||||
|
||||
use anyhow::Context;
|
||||
use cargo_mobile2::{
|
||||
android::{aab, apk, config::Config as AndroidConfig, env::Env, target::Target},
|
||||
opts::{NoiseLevel, Profile},
|
||||
@ -63,10 +63,10 @@ pub struct Options {
|
||||
pub split_per_abi: bool,
|
||||
/// Build APKs.
|
||||
#[clap(long)]
|
||||
pub apk: bool,
|
||||
pub apk: Option<bool>,
|
||||
/// Build AABs.
|
||||
#[clap(long)]
|
||||
pub aab: bool,
|
||||
pub aab: Option<bool>,
|
||||
/// Open Android Studio
|
||||
#[clap(short, long)]
|
||||
pub open: bool,
|
||||
@ -83,6 +83,9 @@ pub struct Options {
|
||||
/// Only use this when you are sure the mismatch is incorrectly detected as version mismatched Tauri packages can lead to unknown behavior.
|
||||
#[clap(long)]
|
||||
pub ignore_version_mismatches: bool,
|
||||
/// Target device of this build
|
||||
#[clap(skip)]
|
||||
pub target_device: Option<TargetDevice>,
|
||||
}
|
||||
|
||||
impl From<Options> for BuildOptions {
|
||||
@ -104,7 +107,15 @@ impl From<Options> for BuildOptions {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn command(options: Options, noise_level: NoiseLevel) -> Result<()> {
|
||||
pub struct BuiltApplication {
|
||||
pub config: AndroidConfig,
|
||||
pub interface: AppInterface,
|
||||
// prevent drop
|
||||
#[allow(dead_code)]
|
||||
options_handle: OptionsHandle,
|
||||
}
|
||||
|
||||
pub fn command(options: Options, noise_level: NoiseLevel) -> Result<BuiltApplication> {
|
||||
crate::helpers::app_paths::resolve();
|
||||
|
||||
delete_codegen_vars();
|
||||
@ -154,19 +165,31 @@ pub fn command(options: Options, noise_level: NoiseLevel) -> Result<()> {
|
||||
};
|
||||
|
||||
let tauri_path = tauri_dir();
|
||||
set_current_dir(tauri_path).with_context(|| "failed to change current working directory")?;
|
||||
set_current_dir(tauri_path).context("failed to set current directory to Tauri directory")?;
|
||||
|
||||
ensure_init(
|
||||
&tauri_config,
|
||||
config.app(),
|
||||
config.project_dir(),
|
||||
MobileTarget::Android,
|
||||
options.ci,
|
||||
)?;
|
||||
|
||||
let mut env = env()?;
|
||||
let mut env = env(options.ci)?;
|
||||
configure_cargo(&mut env, &config)?;
|
||||
|
||||
crate::build::setup(&interface, &mut build_options, tauri_config.clone(), true)?;
|
||||
generate_tauri_properties(
|
||||
&config,
|
||||
tauri_config.lock().unwrap().as_ref().unwrap(),
|
||||
false,
|
||||
)?;
|
||||
|
||||
{
|
||||
let config_guard = tauri_config.lock().unwrap();
|
||||
let config_ = config_guard.as_ref().unwrap();
|
||||
|
||||
crate::build::setup(&interface, &mut build_options, config_, true)?;
|
||||
}
|
||||
|
||||
let installed_targets =
|
||||
crate::interface::rust::installation::installed_targets().unwrap_or_default();
|
||||
@ -175,14 +198,20 @@ pub fn command(options: Options, noise_level: NoiseLevel) -> Result<()> {
|
||||
log::info!("Installing target {}", first_target.triple());
|
||||
first_target
|
||||
.install()
|
||||
.context("failed to install target with rustup")?;
|
||||
.map_err(|error| Error::CommandFailed {
|
||||
command: "rustup target add".to_string(),
|
||||
error,
|
||||
})
|
||||
.context("failed to install target")?;
|
||||
}
|
||||
// run an initial build to initialize plugins
|
||||
first_target.build(&config, &metadata, &env, noise_level, true, profile)?;
|
||||
first_target
|
||||
.build(&config, &metadata, &env, noise_level, true, profile)
|
||||
.context("failed to build Android app")?;
|
||||
|
||||
let open = options.open;
|
||||
let _handle = run_build(
|
||||
interface,
|
||||
let options_handle = run_build(
|
||||
&interface,
|
||||
options,
|
||||
build_options,
|
||||
tauri_config,
|
||||
@ -196,12 +225,16 @@ pub fn command(options: Options, noise_level: NoiseLevel) -> Result<()> {
|
||||
open_and_wait(&config, &env);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
Ok(BuiltApplication {
|
||||
config,
|
||||
interface,
|
||||
options_handle,
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn run_build(
|
||||
interface: AppInterface,
|
||||
interface: &AppInterface,
|
||||
mut options: Options,
|
||||
build_options: BuildOptions,
|
||||
tauri_config: ConfigHandle,
|
||||
@ -210,10 +243,10 @@ fn run_build(
|
||||
env: &mut Env,
|
||||
noise_level: NoiseLevel,
|
||||
) -> Result<OptionsHandle> {
|
||||
if !(options.apk || options.aab) {
|
||||
if !(options.apk.is_some() || options.aab.is_some()) {
|
||||
// if the user didn't specify the format to build, we'll do both
|
||||
options.apk = true;
|
||||
options.aab = true;
|
||||
options.apk = Some(true);
|
||||
options.aab = Some(true);
|
||||
}
|
||||
|
||||
let interface_options = InterfaceOptions {
|
||||
@ -234,13 +267,13 @@ fn run_build(
|
||||
noise_level,
|
||||
vars: Default::default(),
|
||||
config: build_options.config,
|
||||
target_device: None,
|
||||
target_device: options.target_device.clone(),
|
||||
};
|
||||
let handle = write_options(tauri_config.lock().unwrap().as_ref().unwrap(), cli_options)?;
|
||||
|
||||
inject_resources(config, tauri_config.lock().unwrap().as_ref().unwrap())?;
|
||||
|
||||
let apk_outputs = if options.apk {
|
||||
let apk_outputs = if options.apk.unwrap_or_default() {
|
||||
apk::build(
|
||||
config,
|
||||
env,
|
||||
@ -248,12 +281,13 @@ fn run_build(
|
||||
profile,
|
||||
get_targets_or_all(options.targets.clone().unwrap_or_default())?,
|
||||
options.split_per_abi,
|
||||
)?
|
||||
)
|
||||
.context("failed to build APK")?
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
let aab_outputs = if options.aab {
|
||||
let aab_outputs = if options.aab.unwrap_or_default() {
|
||||
aab::build(
|
||||
config,
|
||||
env,
|
||||
@ -261,13 +295,18 @@ fn run_build(
|
||||
profile,
|
||||
get_targets_or_all(options.targets.unwrap_or_default())?,
|
||||
options.split_per_abi,
|
||||
)?
|
||||
)
|
||||
.context("failed to build AAB")?
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
log_finished(apk_outputs, "APK");
|
||||
log_finished(aab_outputs, "AAB");
|
||||
if !apk_outputs.is_empty() {
|
||||
log_finished(apk_outputs, "APK");
|
||||
}
|
||||
if !aab_outputs.is_empty() {
|
||||
log_finished(aab_outputs, "AAB");
|
||||
}
|
||||
|
||||
Ok(handle)
|
||||
}
|
||||
@ -285,12 +324,8 @@ fn get_targets_or_all<'a>(targets: Vec<String>) -> Result<Vec<&'a Target<'a>>> {
|
||||
.join(",");
|
||||
|
||||
for t in targets {
|
||||
let target = Target::for_name(&t).ok_or_else(|| {
|
||||
anyhow::anyhow!(
|
||||
"Target {} is invalid; the possible targets are {}",
|
||||
t,
|
||||
possible_targets
|
||||
)
|
||||
let target = Target::for_name(&t).with_context(|| {
|
||||
format!("Target {t} is invalid; the possible targets are {possible_targets}",)
|
||||
})?;
|
||||
outs.push(target);
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.