File Transfer (Local Network) (#34)

* feat: implement a file streamer for file sharing

Server only, with a hardcoded file path

* bump valibot version

* feat: add server-info gRPC module to serve server info

* feat: add ssl cert and public key to peers state

When a peer is online, KK will always know its cert and public key for future secure communication

* feat: add grpc ts package

* Enable the "/refresh-worker-extension" REST API; the gRPC version isn't ready yet

* update pnpm lock

* ci: fix CI by reordering the protobuf install step

* ci: fix

* upgrade api due to valibot incompatibility

* fix: use fs instead of bun shell to be compatible with windows

* skip grpc pkg build on windows

* feat: local network file transfer prototype working

* fix: grpc build.ts

* downgrade Next.js to 14

* ci: add CI env var to try to fix the Next build

* fix: hideRefreshBtn and a few other buttons' hide APIs in the iframe extension page

* feat: disable NODE_TLS_REJECT_UNAUTHORIZED for extension HMR refresh

* fix: manifest json schema with objectWithRest to allow any other fields in package.json

* chore: update valibot and related dependencies to version 1.0.0-beta.9 in pnpm-lock.yaml

* ci: add protobuf compiler installation to manifest-schema-upload workflow

* refactor: move grpc code from jarvis to a separate grpc crate

for easier testing

* feat(file-transfer): POC multi file + directory file transfer

* feat(file-transfer): replace the recursive file-transfer download in TS with Rust

* feat(file-transfer): implement on_progress event for file transfer

* feat(file-transfer): report progress every 1MB instead of 100 iterations

* feat(file-transfer): add progress bar

* feat(file-transfer): UI

* feat(file-transfer): add file transfer bucket info preview

Show total size and number of files

* feat(file-transfer): improve UX

Show bucket info during confirmation; improve the progress bar UI and prevent inconsistent width. A usage sketch of the full transfer flow follows this commit list.

* feat(grpc): skip build in Cloudflare Pages due to missing protoc

* refactor: remove unused imports via cargo fix

* ci: debug cloudflare pages env var

* fix(grpc): update environment variable access for Cloudflare Pages build check

* fix(grpc): add error handling for protoc command in build script

* chore: update kkrpc version to 0.0.13, remove kkrpc submodule, and enhance grpc build script logging

- Updated kkrpc dependency version from 0.0.12 to 0.0.13 in package.json.
- Removed the kkrpc submodule from the project.
- Enhanced logging in the grpc build script to include additional Cloudflare Pages environment variables for better debugging.

* fix(api): typescript error, remove base.json from tsconfig

* chore: update pnpm lock

* fix(api): update TypeScript configuration to extend base.json and clean up unused options

* refactor(api): update TypeScript configuration to extend path-alias.json and enhance compiler options

* fix(api): restore KunkunManifestPermission in PermissionUnion and update valibot import in schema tests

* fix: missing trait error

* fix: replace JS require with import

* test: fix a unit test with a more robust method
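
A minimal usage sketch of the flow above, based on the commands this PR adds to `@kksh/api/commands` and `@kksh/api/models`. Peer selection, the confirm dialog, and UI state from the real page are omitted:

```ts
import {
	downloadFiles,
	fileTransferPreviewBucket,
	getPeers,
	localNetSendFile,
	type ProgressPayload
} from "@kksh/api/commands"
import type { FileTransferPayload } from "@kksh/api/models"
import { listen } from "@tauri-apps/api/event"
import * as path from "@tauri-apps/api/path"

// Sender: preview the selected paths as a bucket, then stream them to a discovered peer.
async function sendToPeer(files: string[]) {
	const peers = await getPeers()
	const peer = Object.values(peers)[0] // placeholder: the real UI lets the user pick a peer
	if (!peer) return
	const info = await fileTransferPreviewBucket(files)
	console.log(`sending ${info.total_files} files, ${info.total_bytes} bytes`)
	await localNetSendFile(peer.addresses[0], peer.port, peer.sslCert, files)
}

// Receiver: the sender triggers a "file-transfer-request" event; download with progress reporting.
async function listenForTransfers() {
	return listen<FileTransferPayload>("file-transfer-request", async (e) => {
		const saveDir = await path.downloadDir()
		await downloadFiles(e.payload, saveDir, (progress: ProgressPayload) => {
			console.log(`${progress.progressBytes}/${progress.totalBytes} bytes`)
		})
	})
}
```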

---------

Co-authored-by: Huakun Shen <huaukun.shen@huakunshen.com>
Authored by Huakun Shen on 2024-12-11 08:14:40 -05:00, committed by GitHub
parent da8e37c4a1
commit 99b940b03b
110 changed files with 4226 additions and 2058 deletions

View File

@ -31,15 +31,6 @@ jobs:
deno-version: v2.x
- name: Install Rust stable
uses: dtolnay/rust-toolchain@stable
- name: Install Dependencies
run: pnpm install
- name: Setup
run: pnpm prepare
- name: Build
run: pnpm build
- name: JS Test
if: matrix.os == 'ubuntu-24.04'
run: pnpm test
- name: Install protobuf (Mac)
if: matrix.os == 'macos-14'
run: |
@ -55,11 +46,17 @@ jobs:
choco install protoc
echo "VCPKG_ROOT=$env:VCPKG_INSTALLATION_ROOT" | Out-File -FilePath $env:GITHUB_ENV -Append
vcpkg install openssl:x64-windows-static-md
# choco install openssl -y
# echo OPENSSL_DIR='"C:\\Program Files\\OpenSSL-Win64"' >> $env:GITHUB_ENV
# echo OPENSSL_INCLUDE_DIR='"C:\\Program Files\\OpenSSL-Win64\\include"' >> $env:GITHUB_ENV
# echo OPENSSL_LIB_DIR='"C:\\Program Files\\OpenSSL-Win64\\lib"' >> $env:GITHUB_ENV
# openssl version
- name: Install Dependencies
run: pnpm install
- name: Setup
run: pnpm prepare
- name: Build
env:
NODE_OPTIONS: --max-old-space-size=4096
run: pnpm build
- name: JS Test
if: matrix.os == 'ubuntu-24.04'
run: pnpm test
- name: Cargo Build and Test
if: matrix.os == 'ubuntu-24.04'
run: |

View File

@ -23,6 +23,10 @@ jobs:
- uses: denoland/setup-deno@v2
with:
deno-version: v2.x
- name: Install Protobuf
run: |
sudo apt-get update
sudo apt-get install -y protobuf-compiler
- name: Install Dependencies
run: pnpm install
- name: Setup

View File

@ -3,6 +3,7 @@
"svelte.svelte-vscode",
"tauri-apps.tauri-vscode",
"rust-lang.rust-analyzer",
"denoland.vscode-deno"
"denoland.vscode-deno",
"tamasfe.even-better-toml"
]
}

Cargo.lock (generated, 216 changed lines)
View File

@ -612,12 +612,6 @@ version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
[[package]]
name = "base64ct"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
[[package]]
name = "bindgen"
version = "0.69.5"
@ -1235,12 +1229,6 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe1d7dcda7d1da79e444bdfba1465f2f849a58b07774e1df473ee77030cb47a7"
[[package]]
name = "const-oid"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
name = "const-random"
version = "0.1.18"
@ -1471,14 +1459,11 @@ dependencies = [
"axum-server",
"block-padding",
"cbc",
"hex",
"openssl",
"rand 0.8.5",
"reqwest 0.12.9",
"ring",
"rsa",
"rustls 0.23.16",
"sha2",
"tokio",
]
@ -1589,17 +1574,6 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da692b8d1080ea3045efaab14434d40468c3d8657e42abddfffca87b428f4c1b"
[[package]]
name = "der"
version = "0.7.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0"
dependencies = [
"const-oid",
"pem-rfc7468",
"zeroize",
]
[[package]]
name = "deranged"
version = "0.3.11"
@ -1652,7 +1626,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"const-oid",
"crypto-common",
"subtle",
]
@ -2026,9 +1999,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flate2"
version = "1.0.34"
version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0"
checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c"
dependencies = [
"crc32fast",
"libz-ng-sys",
@ -2529,6 +2502,19 @@ dependencies = [
"system-deps",
]
[[package]]
name = "grpc"
version = "0.1.0"
dependencies = [
"anyhow",
"prost",
"serde",
"serde_json",
"tonic",
"tonic-build",
"uuid",
]
[[package]]
name = "gtk"
version = "0.18.1"
@ -3318,6 +3304,7 @@ dependencies = [
"anyhow",
"chrono",
"cocoa 0.24.1",
"crypto",
"log",
"mac-security-rs",
"mdns-sd",
@ -3358,9 +3345,6 @@ name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
dependencies = [
"spin",
]
[[package]]
name = "lazycell"
@ -3435,12 +3419,6 @@ dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "libm"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa"
[[package]]
name = "libredox"
version = "0.1.3"
@ -3882,23 +3860,6 @@ dependencies = [
"num-traits",
]
[[package]]
name = "num-bigint-dig"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151"
dependencies = [
"byteorder",
"lazy_static",
"libm",
"num-integer",
"num-iter",
"num-traits",
"rand 0.8.5",
"smallvec",
"zeroize",
]
[[package]]
name = "num-conv"
version = "0.1.0"
@ -3936,17 +3897,6 @@ dependencies = [
"num-traits",
]
[[package]]
name = "num-iter"
version = "0.1.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-rational"
version = "0.4.2"
@ -3965,7 +3915,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
dependencies = [
"autocfg",
"libm",
]
[[package]]
@ -4468,15 +4417,6 @@ dependencies = [
"hmac",
]
[[package]]
name = "pem-rfc7468"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412"
dependencies = [
"base64ct",
]
[[package]]
name = "percent-encoding"
version = "2.3.1"
@ -4670,27 +4610,6 @@ dependencies = [
"futures-io",
]
[[package]]
name = "pkcs1"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f"
dependencies = [
"der",
"pkcs8",
"spki",
]
[[package]]
name = "pkcs8"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
dependencies = [
"der",
"spki",
]
[[package]]
name = "pkg-config"
version = "0.3.31"
@ -5448,27 +5367,6 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "rsa"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc"
dependencies = [
"const-oid",
"digest",
"num-bigint-dig",
"num-integer",
"num-traits",
"pkcs1",
"pkcs8",
"rand_core 0.6.4",
"sha2",
"signature",
"spki",
"subtle",
"zeroize",
]
[[package]]
name = "rusqlite"
version = "0.31.0"
@ -5575,6 +5473,20 @@ dependencies = [
"sct",
]
[[package]]
name = "rustls"
version = "0.22.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432"
dependencies = [
"log",
"ring",
"rustls-pki-types",
"rustls-webpki 0.102.8",
"subtle",
"zeroize",
]
[[package]]
name = "rustls"
version = "0.23.16"
@ -5979,16 +5891,6 @@ dependencies = [
"libc",
]
[[package]]
name = "signature"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
dependencies = [
"digest",
"rand_core 0.6.4",
]
[[package]]
name = "simd-adler32"
version = "0.3.7"
@ -6098,16 +6000,6 @@ dependencies = [
"lock_api",
]
[[package]]
name = "spki"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
dependencies = [
"base64ct",
"der",
]
[[package]]
name = "stable_deref_trait"
version = "1.2.0"
@ -6293,6 +6185,20 @@ dependencies = [
"windows 0.52.0",
]
[[package]]
name = "sysinfo"
version = "0.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3b5ae3f4f7d64646c46c4cae4e3f01d1c5d255c7406fdd7c7f999a94e488791"
dependencies = [
"core-foundation-sys",
"libc",
"memchr",
"ntapi",
"rayon",
"windows 0.56.0",
]
[[package]]
name = "system-configuration"
version = "0.5.1"
@ -6702,18 +6608,26 @@ dependencies = [
"crypto",
"db",
"flate2",
"futures-util",
"grpc",
"ico",
"log",
"mac-security-rs",
"mdns-sd",
"mime_guess",
"obfstr",
"openssl",
"plist",
"prost",
"rayon",
"reqwest 0.12.9",
"rust_search",
"rustls 0.23.16",
"serde",
"serde_json",
"strum",
"strum_macros",
"sysinfo 0.32.0",
"tar",
"tauri",
"tauri-icns",
@ -6721,9 +6635,12 @@ dependencies = [
"tauri-plugin-clipboard",
"tauri-plugin-network",
"tauri-plugin-store",
"tauri-plugin-upload",
"tauri-winres",
"thiserror 1.0.66",
"tokio",
"tokio-stream",
"tokio-util",
"tonic",
"tonic-build",
"tonic-reflection",
@ -6901,7 +6818,7 @@ version = "2.0.8"
dependencies = [
"serde",
"starship-battery",
"sysinfo",
"sysinfo 0.30.13",
"tauri",
"tauri-plugin",
"thiserror 1.0.66",
@ -6940,9 +6857,8 @@ dependencies = [
[[package]]
name = "tauri-plugin-upload"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "116754130d3f95cf73552a0723376186a8f21607ac5ff350980af87a0eccba73"
version = "2.2.1"
source = "git+https://github.com/HuakunShen/tauri-plugins-workspace.git?branch=kunkun#70f6d77c69715683a0b63841b49bf6c0c3b98d4c"
dependencies = [
"futures-util",
"log",
@ -6952,7 +6868,7 @@ dependencies = [
"serde_json",
"tauri",
"tauri-plugin",
"thiserror 1.0.66",
"thiserror 2.0.3",
"tokio",
"tokio-util",
]
@ -7257,6 +7173,17 @@ dependencies = [
"tokio",
]
[[package]]
name = "tokio-rustls"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f"
dependencies = [
"rustls 0.22.4",
"rustls-pki-types",
"tokio",
]
[[package]]
name = "tokio-rustls"
version = "0.26.0"
@ -7370,7 +7297,10 @@ dependencies = [
"percent-encoding",
"pin-project",
"prost",
"rustls-pemfile 2.2.0",
"rustls-pki-types",
"tokio",
"tokio-rustls 0.25.0",
"tokio-stream",
"tower",
"tower-layer",

View File

@ -8,6 +8,7 @@ members = [
"packages/mac-security-rs",
"packages/tauri-plugins/jarvis",
"packages/crypto",
"packages/grpc",
]
[workspace.dependencies]
@ -15,6 +16,7 @@ serde = { version = "1", features = ["derive"] }
anyhow = "1.0.86"
serde_json = "1"
tokio = { version = "1.0", features = ["macros", "rt-multi-thread", "signal"] }
tokio-util = "0.7.12"
mdns-sd = "0.11.1"
tauri-plugin-network = { path = "./vendors/tauri-plugin-network" }
tauri-plugin-clipboard = "2.1.8"
@ -24,6 +26,7 @@ strum = "0.26"
strum_macros = "0.26"
chrono = "0.4.38"
applications = { path = "./vendors/applications-rs" }
grpc = { path = "./packages/grpc" }
tauri-plugin-jarvis = { path = "./packages/tauri-plugins/jarvis" }
tauri-plugin-system-info = { path = "./vendors/tauri-plugin-system-info" }
db = { path = "./packages/db" }
@ -35,3 +38,9 @@ reqwest = { version = "0.12", features = ["json", "rustls-tls"] }
crypto = { path = "./packages/crypto" }
base64 = "0.22.1"
obfstr = "0.4.4"
sysinfo = "0.32.0"
openssl = "0.10.68"
tower = { version = "0.4", features = ["util"] }
tower-http = { version = "0.4.0", features = ["fs", "trace", "cors"] }
rayon = "1.10.0"
tauri-plugin-upload = { git = "https://github.com/HuakunShen/tauri-plugins-workspace.git", branch = "kunkun", package = "tauri-plugin-upload" }

View File

@ -30,9 +30,9 @@
"debug": "^4.3.6",
"fs-extra": "^11.2.0",
"inquirer": "^10.1.2",
"valibot": "^0.40.0"
"valibot": "^1.0.0-beta.8"
},
"files": [
"dist"
]
}
}

View File

@ -27,7 +27,7 @@
"commander": "^12.1.0",
"fs-extra": "^11.2.0",
"handlebars": "^4.7.8",
"valibot": "^0.40.0"
"valibot": "^1.0.0-beta.8"
},
"files": [
"dist"

View File

@ -24,8 +24,9 @@
"@tauri-apps/api": "^2.1.1",
"@tauri-apps/plugin-shell": "^2.0.1",
"gsap": "^3.12.5",
"kkrpc": "^0.0.12",
"kkrpc": "^0.0.13",
"lz-string": "^1.5.0",
"pretty-bytes": "^6.1.1",
"semver": "^7.6.3",
"svelte-sonner": "^0.3.28",
"sveltekit-superforms": "^2.20.1",

View File

@ -40,7 +40,8 @@ tauri-plugin-dialog = "2.0.1"
tauri-plugin-notification = "2.0.1"
tauri-plugin-os = "2.0.1"
tauri-plugin-http = "2.0.1"
tauri-plugin-upload = "2.0.1"
tauri-plugin-upload = { workspace = true }
# tauri-plugin-upload = "2.2.1"
tauri-plugin-jarvis = { workspace = true }
tauri-plugin-network = { workspace = true }
tauri-plugin-system-info = { workspace = true }
@ -48,6 +49,7 @@ tauri-plugin-clipboard = { workspace = true }
tauri-plugin-store = "2.1.0"
tauri-plugin-deep-link = "2"
tauri-plugin-log = { version = "2.0.1", features = ["colored"] }
crypto = { workspace = true }
zip = "2.1.3"
uuid = "1.11.0"
# tauri-plugin-devtools = "2.0.0"

View File

@ -173,8 +173,11 @@ pub fn run() {
));
app.manage(tauri_plugin_jarvis::model::app_state::AppState {});
tauri_plugin_jarvis::setup::server::setup_server(app.handle())?; // start the server
let mdns = tauri_plugin_jarvis::setup::peer_discovery::setup_mdns(my_port)?;
let jarvis_state = app.state::<tauri_plugin_jarvis::JarvisState>();
let mdns = tauri_plugin_jarvis::setup::peer_discovery::setup_mdns(
my_port,
crypto::RsaCrypto::public_key_to_string(&jarvis_state.rsa_public_key),
)?;
tauri_plugin_jarvis::setup::peer_discovery::handle_mdns_service_evt(
app.handle(),
mdns.browse()?,

View File

@ -36,6 +36,9 @@
]
},
"plugins": {
"fs": {
"requireLiteralLeadingDot": false
},
"updater": {
"endpoints": ["https://updater.kunkun.sh"],
"pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IDc1NENCRjZFM0JBOEQ0ODMKUldTRDFLZzdicjlNZFhHS0ZKYk13WkdZUTFUM01LNjkvVW5Bb2x1SnB1R0crbFRuMnlRSlJ0STgK"

View File

@ -73,6 +73,18 @@ export const rawBuiltinCmds: BuiltinCmd[] = [
}, 2_000)
}
},
{
name: "File Transfer",
icon: {
type: IconEnum.Iconify,
value: "clarity:file-share-solid"
},
description: "",
function: async () => {
goto("/extension/file-transfer")
appState.clearSearchTerm()
}
},
{
name: "Add Dev Extension",
icon: {

View File

@ -1,5 +1,5 @@
<script lang="ts">
import { listen, TauriEvent, type UnlistenFn } from "@tauri-apps/api/event"
import { listen, TauriEvent, type EventCallback, type UnlistenFn } from "@tauri-apps/api/event"
import { getCurrentWebviewWindow } from "@tauri-apps/api/webviewWindow"
import { onDestroy, onMount, type Snippet } from "svelte"
@ -13,17 +13,18 @@
}: {
children: Snippet
onEnter?: (event: any) => void
onDrop?: (event: any) => void
onDrop?: EventCallback<{ paths: string[] }>
onCancelled?: (event: any) => void
onOver?: (event: any) => void
} = $props()
const appWin = getCurrentWebviewWindow()
onMount(async () => {
if (onEnter) await appWin.listen(TauriEvent.DRAG_ENTER, onEnter)
if (onDrop) await appWin.listen(TauriEvent.DRAG_DROP, onDrop)
if (onCancelled) await appWin.listen(TauriEvent.DRAG_LEAVE, onCancelled)
if (onOver) await appWin.listen(TauriEvent.DRAG_OVER, onOver)
if (onEnter) unlisteners.push(await appWin.listen(TauriEvent.DRAG_ENTER, onEnter))
if (onDrop)
unlisteners.push(await appWin.listen<{ paths: string[] }>(TauriEvent.DRAG_DROP, onDrop))
if (onCancelled) unlisteners.push(await appWin.listen(TauriEvent.DRAG_LEAVE, onCancelled))
if (onOver) unlisteners.push(await appWin.listen(TauriEvent.DRAG_OVER, onOver))
})
onDestroy(() => {

View File

@ -27,13 +27,13 @@ function createExtensionsStore(): Writable<ExtPackageJsonExtra[]> & {
})
}
function getExtensionsFromStore() {
function getExtensionsFromStore(): ExtPackageJsonExtra[] {
const extContainerPath = get(appConfig).extensionsInstallDir
if (!extContainerPath) return []
return get(extensions).filter((ext) => !extAPI.isExtPathInDev(extContainerPath, ext.extPath))
}
function findStoreExtensionByIdentifier(identifier: string) {
function findStoreExtensionByIdentifier(identifier: string): ExtPackageJsonExtra | undefined {
return get(extensions).find((ext) => ext.kunkun.identifier === identifier)
}
@ -42,7 +42,7 @@ function createExtensionsStore(): Writable<ExtPackageJsonExtra[]> & {
* @param extPath absolute path to the extension folder
* @returns loaded extension
*/
async function registerNewExtensionByPath(extPath: string) {
async function registerNewExtensionByPath(extPath: string): Promise<ExtPackageJsonExtra> {
return extAPI
.loadExtensionManifestFromDisk(await path.join(extPath, "package.json"))
.then((ext) => {
@ -65,16 +65,25 @@ function createExtensionsStore(): Writable<ExtPackageJsonExtra[]> & {
* @param extsDir absolute path to the extensions directory
* @returns loaded extension
*/
async function installTarball(tarballPath: string, extsDir: string) {
async function installTarball(
tarballPath: string,
extsDir: string
): Promise<ExtPackageJsonExtra> {
return extAPI.installTarballUrl(tarballPath, extsDir).then((extInstallPath) => {
return registerNewExtensionByPath(extInstallPath)
})
}
async function installDevExtensionDir(dirPath: string) {
return extAPI.installDevExtensionDir(dirPath).then((ext) => {
return registerNewExtensionByPath(ext.extPath)
})
async function installDevExtensionDir(dirPath: string): Promise<ExtPackageJsonExtra> {
return extAPI
.installDevExtensionDir(dirPath)
.then((ext) => {
return registerNewExtensionByPath(ext.extPath)
})
.catch((err) => {
console.error(err)
return Promise.reject(err)
})
}
async function installFromTarballUrl(tarballUrl: string, extsDir: string) {
@ -92,8 +101,6 @@ function createExtensionsStore(): Writable<ExtPackageJsonExtra[]> & {
async function uninstallExtensionByPath(targetPath: string) {
const targetExt = get(extensions).find((ext) => ext.extPath === targetPath)
if (!targetExt) throw new Error(`Extension ${targetPath} not registered in DB`)
console.log(extAPI)
return extAPI
.uninstallExtensionByPath(targetPath)
.then(() => store.update((exts) => exts.filter((ext) => ext.extPath !== targetExt.extPath)))

View File

@ -13,7 +13,6 @@
ModeWatcher,
themeConfigStore,
ThemeWrapper,
Toaster,
updateTheme,
type ThemeConfig
} from "@kksh/svelte5"
@ -25,6 +24,7 @@
import { gsap } from "gsap"
import { Flip } from "gsap/Flip"
import { onDestroy, onMount } from "svelte"
import { toast, Toaster } from "svelte-sonner"
import * as shellx from "tauri-plugin-shellx-api"
/* -------------------------------------------------------------------------- */

View File

@ -19,6 +19,7 @@
iconValue: v.string(),
invertIcon: v.boolean()
})
let icon = $state<Icon>({
type: IconEnum.Iconify,
value: "material-symbols:link",

View File

@ -0,0 +1,187 @@
<script lang="ts">
import DragNDrop from "@/components/common/DragNDrop.svelte"
import { cn } from "@/utils"
import { goBackOnEscape } from "@/utils/key"
import { goBack } from "@/utils/route"
import {
downloadFile,
downloadFiles,
fileTransferPreviewBucket,
getAllFileTransferBuckets,
getPeers,
localNetSendFile,
type ProgressPayload
} from "@kksh/api/commands"
import {
FileNode,
FileTransferPayload,
IconEnum,
MdnsServiceInfo,
type MdnsPeers
} from "@kksh/api/models"
import { Button, Card, Progress, Table } from "@kksh/svelte5"
import { IconMultiplexer, Layouts } from "@kksh/ui"
import { listen, type EventCallback, type UnlistenFn } from "@tauri-apps/api/event"
import * as path from "@tauri-apps/api/path"
import { confirm } from "@tauri-apps/plugin-dialog"
import * as fs from "@tauri-apps/plugin-fs"
import { ArrowLeftIcon, SendIcon } from "lucide-svelte"
import prettyBytes from "pretty-bytes"
import { onDestroy, onMount } from "svelte"
import { toast } from "svelte-sonner"
import FileIcon from "./file-icon.svelte"
let peers: MdnsPeers = $state({})
let dragging = $state(false)
let files: string[] = $state([])
let progressMap = $state<Record<string, ProgressPayload>>({})
let previewBucketInfo = $state<{ total_bytes: number; total_files: number } | null>(null)
let progresses = $derived(Object.values(progressMap))
let unlistenReq: UnlistenFn
$effect(() => {
if (files.length > 0) {
fileTransferPreviewBucket(files).then((info) => {
previewBucketInfo = info
})
}
})
async function getAllBuckets() {
const allBuckets = await getAllFileTransferBuckets()
console.log(allBuckets)
}
onMount(async () => {
peers = await getPeers()
unlistenReq = await listen<FileTransferPayload>("file-transfer-request", async (e) => {
console.log(e)
const confirmed = await confirm(
`Download files (${e.payload.totalFiles} files, ${prettyBytes(e.payload.totalBytes)})?`
)
if (!confirmed) return
downloadFiles(e.payload, await path.downloadDir(), (progress) => {
progressMap[e.payload.code] = progress
console.log(progress)
}).finally(() => {
console.log("finally clean", e.payload.code)
delete progressMap[e.payload.code]
})
})
})
onDestroy(async () => {
unlistenReq()
})
const onDrop: EventCallback<{ paths: string[] }> = async (e) => {
// keep only files not dirs
const filesStats = await Promise.all(e.payload.paths.map((p) => fs.stat(p)))
// keep only files based on stats
// const filesPaths = e.payload.paths.filter((p, i) => filesStats[i].isFile)
files = Array.from(new Set([...files, ...e.payload.paths]))
dragging = false
}
function sendFile(peer: MdnsServiceInfo, files: string[]) {
console.log(peer, files)
localNetSendFile(peer.addresses[0], peer.port, peer.sslCert, files)
}
</script>
<svelte:window on:keydown={goBackOnEscape} />
<Button variant="outline" size="icon" class="fixed left-2 top-2 z-50" onclick={goBack}>
<ArrowLeftIcon class="h-4 w-4" />
</Button>
<div class="h-12" data-tauri-drag-region></div>
<div class="container space-y-2">
{#if progresses.length > 0}
{#each progresses as progress}
{@const progressPerc = Math.round((progress.progressBytes / progress.totalBytes) * 100)}
<div class="flex items-center space-x-2">
<Progress value={progressPerc} class="whitespace-nowrap" />
<span class="w-12 whitespace-nowrap text-right">{progressPerc}%</span>
<span class="w-24 whitespace-nowrap text-right"
>{prettyBytes(progress.transferSpeedBytesPerSecond)}ps</span
>
</div>
{/each}
{/if}
<Layouts.Center>
<DragNDrop {onDrop} onEnter={() => (dragging = true)} onCancelled={() => (dragging = false)}>
<Card.Root
class={cn(
"w-96 space-y-2 px-2 py-3",
dragging ? "border-lime-400/30" : "text-white hover:text-blue-200"
)}
>
<button class="flex h-full w-full cursor-pointer items-center justify-center pt-2">
<div class={cn("flex flex-col items-center", dragging ? "text-lime-400/70" : "")}>
<IconMultiplexer
icon={{ value: "mdi:file", type: IconEnum.Iconify }}
class="h-10 w-10"
/>
<small class="select-none font-mono text-xs">Drag and Drop</small>
<small class="select-none font-mono text-xs">File To Send</small>
</div>
</button>
{#if files.length > 0}
<div>
<pre><strong>Total Files:</strong> {previewBucketInfo?.total_files ?? 0}</pre>
<pre><strong>Total Bytes:</strong> {prettyBytes(
previewBucketInfo?.total_bytes ?? 0
)}</pre>
</div>
<div class="w-full overflow-x-auto">
<div class="flex flex-nowrap gap-2 py-2">
{#each files as file}
<FileIcon
filepath={file}
onDelete={() => {
files = files.filter((f) => f !== file)
}}
/>
{/each}
</div>
</div>
{/if}
</Card.Root>
</DragNDrop>
</Layouts.Center>
<div class="container">
<!-- <Button onclick={getAllBuckets}>Get All Buckets</Button> -->
<Table.Root>
<Table.Caption>Peers in local network</Table.Caption>
<Table.Header>
<Table.Row>
<Table.Head class="w-[100px]">Hostname</Table.Head>
<Table.Head>Address</Table.Head>
<Table.Head>Port</Table.Head>
<Table.Head class="text-right">Send</Table.Head>
</Table.Row>
</Table.Header>
<Table.Body>
{#each Object.values(peers) as peer}
<Table.Row>
<Table.Cell class="font-medium">{peer.hostname}</Table.Cell>
<Table.Cell>{peer.addresses[0]}</Table.Cell>
<Table.Cell>{peer.port}</Table.Cell>
<Table.Cell class="text-right">
<Button
variant="outline"
size="icon"
onclick={() => {
sendFile(peer, files)
}}
>
<SendIcon class="h-4 w-4" />
</Button>
</Table.Cell>
</Table.Row>
{/each}
</Table.Body>
</Table.Root>
</div>
</div>

View File

@ -0,0 +1,35 @@
<script lang="ts">
import { Button, ButtonModule, Popover } from "@kksh/svelte5"
import { basename } from "@tauri-apps/api/path"
import { stat } from "@tauri-apps/plugin-fs"
import { DeleteIcon, FileIcon, FolderIcon, TrashIcon } from "lucide-svelte"
import { onMount } from "svelte"
const { filepath, onDelete } = $props()
let filename = $state("")
let isDirectory = $state(false)
onMount(async () => {
filename = await basename(filepath)
isDirectory = (await stat(filepath)).isDirectory
})
</script>
<Popover.Root>
<Popover.Trigger
class={ButtonModule.buttonVariants({ variant: "outline", size: "icon", class: "shrink-0" })}
>
{#if isDirectory}
<FolderIcon class="h-8 w-8 shrink-0" />
{:else}
<FileIcon class="h-8 w-8 shrink-0" />
{/if}
</Popover.Trigger>
<Popover.Content class="w-fit space-y-2">
<pre class="text-xs">{filepath}</pre>
<Button variant="destructive" size="icon" class="" onclick={onDelete}>
<TrashIcon />
</Button>
</Popover.Content>
</Popover.Root>

View File

@ -63,6 +63,7 @@
uiControl.showMoveBtn = false
},
hideRefreshButton: async () => {
console.log("hideRefreshButton")
uiControl.showRefreshBtn = false
},
showBackButton: async (position?: Position) => {
@ -153,7 +154,7 @@
</script>
<svelte:window on:keydown={goBackOnEscape} />
{#if uiControl.backBtnPosition}
{#if uiControl.backBtnPosition && uiControl.showBackBtn}
<Button
class={cn("absolute", positionToTailwindClasses(uiControl.backBtnPosition))}
size="icon"
@ -168,7 +169,7 @@
{/if}
</Button>
{/if}
{#if uiControl.moveBtnPosition}
{#if uiControl.moveBtnPosition && uiControl.showMoveBtn}
<Button
class={cn("absolute", positionToTailwindClasses(uiControl.moveBtnPosition))}
style={`${positionToCssStyleString(uiControl.moveBtnPosition)}`}
@ -179,7 +180,7 @@
<MoveIcon data-tauri-drag-region class="w-4" />
</Button>
{/if}
{#if uiControl.refreshBtnPosition}
{#if uiControl.refreshBtnPosition && uiControl.showRefreshBtn}
<Button
class={cn("absolute", positionToTailwindClasses(uiControl.refreshBtnPosition))}
style={`${positionToCssStyleString(uiControl.refreshBtnPosition)}`}

View File

@ -42,6 +42,11 @@
<Switch bind:checked={$appConfig.joinBetaProgram} />
</li>
<li>
<span>Developer Mode</span>
<Switch bind:checked={$appConfig.developerMode} />
</li>
<!-- <li>
<span>Language</span>
<Switch bind:checked={$appConfig} />

View File

@ -43,12 +43,12 @@
"@tauri-apps/plugin-shell": "^2.0.1",
"@tauri-apps/plugin-store": "^2.1.0",
"@tauri-apps/plugin-updater": "^2.0.0",
"@tauri-apps/plugin-upload": "^2.1.0",
"@tauri-apps/plugin-upload": "https://gitpkg.vercel.app/HuakunShen/tauri-plugins-workspace/plugins/upload?69b198b0ccba269fe7622a95ec6a33ae392bff03",
"supabase": "^1.219.2",
"tauri-plugin-network-api": "workspace:*",
"tauri-plugin-shellx-api": "^2.0.14",
"tauri-plugin-system-info-api": "workspace:*",
"valibot": "^0.40.0",
"valibot": "^1.0.0-beta.9",
"zod": "^3.23.8"
},
"workspaces": [

View File

@ -1,6 +1,6 @@
{
"name": "@kksh/api",
"version": "0.0.44",
"version": "0.0.46",
"type": "module",
"exports": {
".": "./src/index.ts",
@ -53,18 +53,18 @@
"@tauri-apps/plugin-shell": "^2.0.1",
"@tauri-apps/plugin-store": "^2.1.0",
"@tauri-apps/plugin-updater": "^2.0.0",
"@tauri-apps/plugin-upload": "^2.1.0",
"kkrpc": "^0.0.12",
"@tauri-apps/plugin-upload": "https://gitpkg.vercel.app/HuakunShen/tauri-plugins-workspace/plugins/upload?69b198b0ccba269fe7622a95ec6a33ae392bff03",
"kkrpc": "^0.0.13",
"lodash": "^4.17.21",
"minimatch": "^10.0.1",
"node-fetch": "^3.3.2",
"semver": "^7.6.3",
"svelte-sonner": "^0.3.28",
"tauri-api-adapter": "^0.3.13",
"tauri-api-adapter": "^0.3.16",
"tauri-plugin-network-api": "2.0.4",
"tauri-plugin-shellx-api": "^2.0.14",
"tauri-plugin-system-info-api": "2.0.8",
"valibot": "^0.40.0"
"valibot": "^1.0.0-beta.8"
},
"files": [
"src",

View File

@ -0,0 +1,79 @@
import { Channel, invoke } from "@tauri-apps/api/core"
import type { FilesBucket, FileTransferPayload } from "../models/file-transfer"
import { generateJarvisPluginCommand } from "./common"
export function getFileTransferBucketKeys(): Promise<string[]> {
return invoke<string[]>(generateJarvisPluginCommand("get_file_transfer_bucket_keys"))
}
export function getFileTransferBucketByKey(key: string): Promise<FilesBucket> {
return invoke<FilesBucket>(generateJarvisPluginCommand("get_file_transfer_bucket_by_key"), {
key
})
}
export function getAllFileTransferBuckets(): Promise<FilesBucket[]> {
return getFileTransferBucketKeys().then((keys) => {
return Promise.all(keys.map((key) => getFileTransferBucketByKey(key)))
})
}
export function localNetSendFile(
ip: string,
port: number,
certPem: string,
files: string[]
): Promise<void> {
return invoke(generateJarvisPluginCommand("local_net_send_file"), {
filesToSend: files,
ip,
port,
certPem
})
}
export function fileTransferPreviewBucket(
files: string[]
): Promise<{ total_bytes: number; total_files: number }> {
return invoke<{ total_bytes: number; total_files: number }>(
generateJarvisPluginCommand("file_transfer_preview_bucket"),
{
files
}
)
}
export function downloadFile(
code: string,
filePath: string,
sslCert: string,
url: string
): Promise<void> {
return invoke(generateJarvisPluginCommand("download_file"), { code, filePath, sslCert, url })
}
export type ProgressPayload = {
code: string
progressBytes: number
totalBytes: number
transferSpeedBytesPerSecond: number
currentFileName: string
totalFiles: number
currentFileIndex: number
}
export function downloadFiles(
payload: FileTransferPayload,
saveDir: string,
onProgressHandler?: (progress: ProgressPayload) => void
): Promise<void> {
const channel = new Channel<ProgressPayload>()
if (onProgressHandler) {
channel.onmessage = onProgressHandler
}
return invoke(generateJarvisPluginCommand("download_files"), {
payload,
saveDir,
onProgress: channel
})
}
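
A brief sketch, not part of this diff, of turning a `ProgressPayload` into the percentage and human-readable speed shown in the UI; it assumes `pretty-bytes` is available, as in the desktop app:

```ts
import prettyBytes from "pretty-bytes"
import type { ProgressPayload } from "@kksh/api/commands"

// Derive display values from a progress event, mirroring the file-transfer page.
function formatProgress(p: ProgressPayload): string {
	const percent = Math.round((p.progressBytes / p.totalBytes) * 100)
	const speed = prettyBytes(p.transferSpeedBytesPerSecond)
	return `${p.currentFileName}: ${percent}% at ${speed}/s`
}
```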

View File

@ -13,3 +13,5 @@ export * from "./utils"
export * as macSecurity from "./mac-security"
export * from "./mdns"
export * from "./common"
export * from "./file-transfer"
export * as fileTransfer from "./file-transfer"

View File

@ -24,6 +24,7 @@ export function checkLocalKunkunService(port: number): Promise<boolean> {
return data["service_name"].toLowerCase() === DESKTOP_SERVICE_NAME.toLowerCase()
})
.catch((err) => {
// console.error("Failed to check local kunkun service", err)
// fetch fail, i.e. server not on this port
return false
})
@ -41,6 +42,7 @@ export async function findLocalhostKunkunPorts(): Promise<number[]> {
}
export async function refreshTemplateWorkerExtensionViaServer() {
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0"
const ports = await findLocalhostKunkunPorts()
console.log("Kunkun ports", ports)
if (ports.length === 0) {

View File

@ -2,7 +2,7 @@ import fs from "fs"
import path from "path"
import { PACKAGES_PATHS } from "@kksh/ci"
import { expect, test } from "bun:test"
import { safeParse } from "valibot"
import * as v from "valibot"
import { ExtPackageJson } from "../manifest"
test("Load and parse every extension in this repo", () => {
@ -19,11 +19,12 @@ test("Load and parse every extension in this repo", () => {
const pkgJsonContent = fs.readFileSync(packageJsonPath, "utf-8")
const pkgJson = JSON.parse(pkgJsonContent)
// validate package.json
// const result = parse(ExtPackageJson, pkgJson)
const parse = safeParse(ExtPackageJson, pkgJson)
const parse = v.safeParse(ExtPackageJson, pkgJson)
if (parse.issues) {
console.log(parse.issues)
console.log(v.flatten(parse.issues))
}
expect(parse.success).toBe(true)
})
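
As noted in the commit list, the manifest schema now uses valibot's `objectWithRest` so extra `package.json` fields no longer fail validation. A hedged illustration of that pattern (not the actual `ExtPackageJson` definition):

```ts
import * as v from "valibot"

// Known fields are validated; any other package.json fields pass through via the rest schema.
const ManifestLike = v.objectWithRest(
	{
		name: v.string(),
		version: v.string()
	},
	v.unknown()
)

const result = v.safeParse(ManifestLike, {
	name: "my-extension",
	version: "0.1.0",
	license: "MIT" // extra field, accepted by the rest schema
})
console.log(result.success) // true
```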

View File

@ -0,0 +1,34 @@
import * as v from "valibot"
export type FileNode = {
filename: string
fileSize: number
id: string
type: number // 0: file, 1: directory
children: FileNode[]
}
export const FileNode: v.GenericSchema<FileNode> = v.object({
filename: v.string(),
fileSize: v.number(),
id: v.string(),
type: v.number(), // 0: file, 1: directory
children: v.array(v.lazy(() => FileNode))
})
export const FileTransferPayload = v.object({
port: v.string(),
code: v.string(),
totalBytes: v.number(),
totalFiles: v.number(),
sslCert: v.string(),
root: v.lazy(() => FileNode),
ip: v.string()
})
export type FileTransferPayload = v.InferOutput<typeof FileTransferPayload>
export const FilesBucket = v.object({
code: v.string(),
idPathMap: v.record(v.string(), v.string())
})
export type FilesBucket = v.InferOutput<typeof FilesBucket>
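
A small sketch of the recursive `FileNode` and `FileTransferPayload` schemas in use; every value below is made up for illustration:

```ts
import * as v from "valibot"
import { FileTransferPayload } from "@kksh/api/models"

// A one-file bucket; the nested `children` array exercises the lazy, recursive FileNode schema.
const result = v.safeParse(FileTransferPayload, {
	port: "9559", // hypothetical port
	code: "abc123",
	totalBytes: 1024,
	totalFiles: 1,
	sslCert: "-----BEGIN CERTIFICATE-----...",
	ip: "192.168.1.10",
	root: {
		filename: "root",
		fileSize: 0,
		id: "root-id",
		type: 1, // 1 = directory
		children: [{ filename: "a.txt", fileSize: 1024, id: "file-1", type: 0, children: [] }]
	}
})
console.log(result.success)
```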

View File

@ -12,3 +12,4 @@ export { AllKunkunPermission, SystemPermissionSchema } from "../permissions"
export { Markdown as MarkdownSchema } from "../ui/worker/schema/markdown"
export * as ListSchema from "../ui/worker/schema/list"
export * as FormSchema from "../ui/worker/schema/form"
export * from "./file-transfer"

View File

@ -172,7 +172,6 @@ export const ExtPackageJson = object({
files: array(string("Files to include in the extension. e.g. ['dist']"))
})
export type ExtPackageJson = InferOutput<typeof ExtPackageJson>
/**
* Extra fields for ExtPackageJson
* e.g. path to the extension

View File

@ -6,7 +6,10 @@ export const MdnsServiceInfo = object({
hostname: string(),
port: number(),
service_type: string(),
subType: optional(string())
subType: optional(string()),
properties: optional(record(string(), string())),
publicKey: string(),
sslCert: string()
})
export type MdnsServiceInfo = InferOutput<typeof MdnsServiceInfo>

View File

@ -11,41 +11,42 @@ import {
// AllPermissionSchema as TauriApiAdapterAllPermissionSchema,
UpdownloadPermissionSchema
} from "tauri-api-adapter/permissions"
import { array, literal, object, optional, string, union, type InferOutput } from "valibot"
import * as v from "valibot"
export const SystemPermissionSchema = union([
literal("system:volumn"),
literal("system:boot"),
literal("system:disk"),
literal("system:apps"),
literal("system:fs"),
literal("system:ui")
export const SystemPermissionSchema = v.union([
v.literal("system:volumn"),
v.literal("system:boot"),
v.literal("system:disk"),
v.literal("system:apps"),
v.literal("system:fs"),
v.literal("system:ui")
])
export const KunkunFsPermissionSchema = union([
export const KunkunFsPermissionSchema = v.union([
FsPermissionSchema,
literal("fs:read-dir"),
literal("fs:stat"),
literal("fs:search")
v.literal("fs:read-dir"),
v.literal("fs:stat"),
v.literal("fs:search")
])
export const EventPermissionSchema = union([
literal("event:drag-drop"),
literal("event:drag-enter"),
literal("event:drag-leave"),
literal("event:drag-over"),
literal("event:window-blur"),
literal("event:window-close-requested"),
literal("event:window-focus")
export const EventPermissionSchema = v.union([
v.literal("event:drag-drop"),
v.literal("event:drag-enter"),
v.literal("event:drag-leave"),
v.literal("event:drag-over"),
v.literal("event:window-blur"),
v.literal("event:window-close-requested"),
v.literal("event:window-focus")
])
export const SecurityPermissionSchema = union([
literal("security:mac:reveal-security-pane"),
literal("security:mac:verify-fingerprint"),
literal("security:mac:reset-screencapture-permission"),
literal("security:mac:request-permission"),
literal("security:mac:check-permission"),
literal("security:mac:all")
export const SecurityPermissionSchema = v.union([
v.literal("security:mac:reveal-security-pane"),
v.literal("security:mac:verify-fingerprint"),
v.literal("security:mac:reset-screencapture-permission"),
v.literal("security:mac:request-permission"),
v.literal("security:mac:check-permission"),
v.literal("security:mac:all")
])
export type SecurityPermission = InferOutput<typeof SecurityPermissionSchema>
export type EventPermission = InferOutput<typeof EventPermissionSchema>
export type SecurityPermission = v.InferOutput<typeof SecurityPermissionSchema>
export type EventPermission = v.InferOutput<typeof EventPermissionSchema>
// export const DenoRuntimePermissionSchema = union([
// literal("deno:net"),
// literal("deno:env"),
@ -56,21 +57,21 @@ export type EventPermission = InferOutput<typeof EventPermissionSchema>
// literal("deno:sys")
// ])
// export type DenoRuntimePermission = InferOutput<typeof DenoRuntimePermissionSchema>
export const DenoSysOptions = union([
literal("hostname"),
literal("osRelease"),
literal("osUptime"),
literal("loadavg"),
literal("networkInterfaces"),
literal("systemMemoryInfo"),
literal("uid"),
literal("gid"),
literal("cpus"),
string()
export const DenoSysOptions = v.union([
v.literal("hostname"),
v.literal("osRelease"),
v.literal("osUptime"),
v.literal("loadavg"),
v.literal("networkInterfaces"),
v.literal("systemMemoryInfo"),
v.literal("uid"),
v.literal("gid"),
v.literal("cpus"),
v.string()
])
export type DenoSysOptions = InferOutput<typeof DenoSysOptions>
export const DenoPermissionScopeSchema = object({
export type DenoSysOptions = v.InferOutput<typeof DenoSysOptions>
export const DenoPermissionScopeSchema = v.object({
/* ------------------------------ Deno Related ------------------------------ */
// net: optional(array(string())),
// env: optional(array(string())),
@ -79,21 +80,21 @@ export const DenoPermissionScopeSchema = object({
// run: optional(array(string())),
// ffi: optional(array(string())),
// sys: optional(array(DenoSysOptions)),
net: optional(union([literal("*"), array(string())])),
env: optional(union([literal("*"), array(string())])),
read: optional(union([literal("*"), array(string())])),
write: optional(union([literal("*"), array(string())])),
run: optional(union([literal("*"), array(string())])),
ffi: optional(union([literal("*"), array(string())])),
sys: optional(union([literal("*"), array(DenoSysOptions)]))
net: v.optional(v.union([v.literal("*"), v.array(v.string())])),
env: v.optional(v.union([v.literal("*"), v.array(v.string())])),
read: v.optional(v.union([v.literal("*"), v.array(v.string())])),
write: v.optional(v.union([v.literal("*"), v.array(v.string())])),
run: v.optional(v.union([v.literal("*"), v.array(v.string())])),
ffi: v.optional(v.union([v.literal("*"), v.array(v.string())])),
sys: v.optional(v.union([v.literal("*"), v.array(DenoSysOptions)]))
})
export const PermissionScopeSchema = object({
path: optional(string()),
url: optional(string()),
cmd: optional(
object({
program: string(),
args: array(string())
export const PermissionScopeSchema = v.object({
path: v.optional(v.string()),
url: v.optional(v.string()),
cmd: v.optional(
v.object({
program: v.string(),
args: v.array(v.string())
})
),
...DenoPermissionScopeSchema.entries
@ -105,46 +106,46 @@ export const PermissionScopeSchema = object({
// deny: optional(array(PermissionScopeSchema))
// })
// export type DenoPermissionScoped = InferOutput<typeof DenoPermissionScopedSchema>
export type KunkunFsPermission = InferOutput<typeof KunkunFsPermissionSchema>
export const FsPermissionScopedSchema = object({
export type KunkunFsPermission = v.InferOutput<typeof KunkunFsPermissionSchema>
export const FsPermissionScopedSchema = v.object({
permission: KunkunFsPermissionSchema,
allow: optional(array(PermissionScopeSchema)),
deny: optional(array(PermissionScopeSchema))
allow: v.optional(v.array(PermissionScopeSchema)),
deny: v.optional(v.array(PermissionScopeSchema))
})
export type FsPermissionScoped = InferOutput<typeof FsPermissionScopedSchema>
export type FsPermissionScoped = v.InferOutput<typeof FsPermissionScopedSchema>
export const OpenPermissionSchema = union([
literal("open:url"),
literal("open:file"),
literal("open:folder")
export const OpenPermissionSchema = v.union([
v.literal("open:url"),
v.literal("open:file"),
v.literal("open:folder")
])
export const OpenPermissionScopedSchema = object({
export const OpenPermissionScopedSchema = v.object({
permission: OpenPermissionSchema,
allow: optional(array(PermissionScopeSchema)),
deny: optional(array(PermissionScopeSchema))
allow: v.optional(v.array(PermissionScopeSchema)),
deny: v.optional(v.array(PermissionScopeSchema))
})
export type OpenPermissionScoped = InferOutput<typeof OpenPermissionScopedSchema>
export type OpenPermissionScoped = v.InferOutput<typeof OpenPermissionScopedSchema>
export const ShellPermissionSchema = union([
literal("shell:execute"),
literal("shell:deno:execute"),
literal("shell:spawn"),
literal("shell:deno:spawn"),
literal("shell:open"),
literal("shell:kill"),
literal("shell:all"),
literal("shell:stdin-write")
export const ShellPermissionSchema = v.union([
v.literal("shell:execute"),
v.literal("shell:deno:execute"),
v.literal("shell:spawn"),
v.literal("shell:deno:spawn"),
v.literal("shell:open"),
v.literal("shell:kill"),
v.literal("shell:all"),
v.literal("shell:stdin-write")
])
export const ShellPermissionScopedSchema = object({
export const ShellPermissionScopedSchema = v.object({
permission: ShellPermissionSchema,
allow: optional(array(PermissionScopeSchema)),
deny: optional(array(PermissionScopeSchema))
allow: v.optional(v.array(PermissionScopeSchema)),
deny: v.optional(v.array(PermissionScopeSchema))
})
export type ShellPermissionScoped = InferOutput<typeof ShellPermissionScopedSchema>
export type ShellPermission = InferOutput<typeof ShellPermissionSchema>
export type ShellPermissionScoped = v.InferOutput<typeof ShellPermissionScopedSchema>
export type ShellPermission = v.InferOutput<typeof ShellPermissionSchema>
export type SystemPermission = InferOutput<typeof SystemPermissionSchema>
export const KunkunManifestPermission = union([
export type SystemPermission = v.InferOutput<typeof SystemPermissionSchema>
export const KunkunManifestPermission = v.union([
// TauriApiAdapterAllPermissionSchema,
ClipboardPermissionSchema,
EventPermissionSchema,
@ -162,9 +163,9 @@ export const KunkunManifestPermission = union([
SecurityPermissionSchema
// FsScopePermissionSchema
])
export const AllKunkunPermission = union([
export const AllKunkunPermission = v.union([
KunkunManifestPermission,
KunkunFsPermissionSchema,
OpenPermissionSchema
])
export type AllKunkunPermission = InferOutput<typeof AllKunkunPermission>
export type AllKunkunPermission = v.InferOutput<typeof AllKunkunPermission>

View File

@ -98,4 +98,4 @@ export const {
open,
app
} = api
export { Child, RPCChannel } from "../api/shell"
export { Child, RPCChannel, Command, DenoCommand } from "../api/shell"

View File

@ -12,12 +12,16 @@ export const breakingChangesVersionCheckpoints = [
{
version: "0.0.34",
changelog: "Replace comlink with kkrpc, extensions using comlink won't work anymore."
},
{
version: "0.0.45",
changelog: "Upgrade valibot to 1.0.0-beta.8; incompatible with previous version 0.40.0"
}
]
const checkpointVersions = breakingChangesVersionCheckpoints.map((c) => c.version)
const sortedCheckpointVersions = sort(checkpointVersions)
export const version = "0.0.44"
export const version = "0.0.46"
export function isVersionBetween(v: string, start: string, end: string) {
const vCleaned = clean(v)

View File

@ -1,5 +1,5 @@
{
"extends": "../typescript-config/base.json",
"extends": "../typescript-config/path-alias.json",
"compilerOptions": {
// Enable latest features
"lib": ["ESNext", "DOM"],

View File

@ -4,13 +4,10 @@ version = "0.1.0"
edition = "2021"
[dependencies]
openssl = "0.10.68"
rand = "0.8.5"
ring = "0.17.8"
rsa = { version = "0.9.6", features = ["sha2"] }
sha2 = "0.10.8"
openssl = { workspace = true }
anyhow = { workspace = true }
hex = "0.4.3"
axum = { workspace = true }
tokio = { workspace = true }
rustls = { workspace = true }

View File

@ -14,6 +14,14 @@ impl RsaCrypto {
Rsa::generate(2048).map_err(anyhow::Error::from)
}
pub fn private_key_to_public_key(private_key: &Rsa<Private>) -> Rsa<Public> {
let public_key_pem = private_key
.public_key_to_pem()
.expect("Failed to convert private key to public key");
RsaCrypto::public_key_from_pem(&public_key_pem)
.expect("Failed to convert pem to public key")
}
pub fn generate_rsa_key_pair_pem() -> anyhow::Result<(Vec<u8>, Vec<u8>)> {
let rsa = Rsa::generate(2048)?;
let private_pem = rsa.private_key_to_pem()?;
@ -29,6 +37,15 @@ impl RsaCrypto {
Rsa::public_key_from_pem(pem).map_err(anyhow::Error::from)
}
pub fn public_key_to_string(public_key: &Rsa<Public>) -> String {
String::from_utf8(
public_key
.public_key_to_pem()
.expect("Failed to convert public key to pem"),
)
.expect("Failed to convert public key pem to string")
}
pub fn encrypt_message(public_key: &Rsa<Public>, message: &[u8]) -> anyhow::Result<Vec<u8>> {
let mut encrypted = vec![0; public_key.size() as usize];
public_key.public_encrypt(message, &mut encrypted, openssl::rsa::Padding::PKCS1)?;

View File

@ -1,10 +1,10 @@
pub mod models;
pub mod schema;
use models::CmdType;
use rusqlite::{params, params_from_iter, Connection, Error, Result, ToSql};
use rusqlite::{params, params_from_iter, Connection, Result, ToSql};
use serde::{Deserialize, Serialize};
use std::path::{self, Path};
use strum_macros::{Display, EnumString};
use std::path::{Path};
use strum_macros::Display;
pub const DB_VERSION: u32 = 1;

View File

@ -1,5 +1,5 @@
use db::JarvisDB;
use rusqlite::{params, Connection, Result};
use rusqlite::Result;
fn main() -> Result<()> {
let db = JarvisDB::new("jarvis.db", None)?;

View File

@ -17,7 +17,7 @@
"@kksh/api": "workspace:*",
"@kksh/supabase": "workspace:*",
"@std/semver": "npm:@jsr/std__semver@^1.0.3",
"uuid": "^11.0.2"
"uuid": "^11.0.3"
},
"peerDependencies": {
"typescript": "^5.0.0"

View File

@ -3,7 +3,7 @@ import { ExtPackageJson, ExtPackageJsonExtra } from "@kksh/api/models"
import { basename, dirname, join } from "@tauri-apps/api/path"
import { readDir, readTextFile } from "@tauri-apps/plugin-fs"
import { debug, error } from "@tauri-apps/plugin-log"
import { flatten, safeParse } from "valibot"
import * as v from "valibot"
import { upsertExtension } from "./db"
/**
@ -14,11 +14,10 @@ import { upsertExtension } from "./db"
export function loadExtensionManifestFromDisk(manifestPath: string): Promise<ExtPackageJsonExtra> {
debug(`loadExtensionManifestFromDisk: ${manifestPath}`)
return readTextFile(manifestPath).then(async (content) => {
const parse = safeParse(ExtPackageJson, JSON.parse(content))
const parse = v.safeParse(ExtPackageJson, JSON.parse(content))
if (parse.issues) {
error(`Fail to load extension from ${manifestPath}. See console for parse error.`)
console.error(parse.issues)
console.error(JSON.stringify(flatten<typeof ExtPackageJson>(parse.issues), null, 2))
console.error(v.flatten<typeof ExtPackageJson>(parse.issues))
throw new Error(`Invalid manifest: ${manifestPath}`)
} else {
// debug(`Loaded extension ${parse.output.kunkun.identifier} from ${manifestPath}`)
@ -55,7 +54,9 @@ export function loadAllExtensionsFromDisk(
}
export async function loadAllExtensionsFromDb(): Promise<ExtPackageJsonExtra[]> {
console.log("loadAllExtensionsFromDb start")
const allDbExts = await (await db.getAllExtensions()).filter((ext) => ext.path)
console.log("allDbExts", allDbExts)
const results: ExtPackageJsonExtra[] = []
for (const ext of allDbExts) {
if (!ext.path) continue
@ -66,7 +67,7 @@ export async function loadAllExtensionsFromDb(): Promise<ExtPackageJsonExtra[]>
console.error(err)
error(`Failed to load extension ${ext.path} from database.`)
// delete this extension from database
await db.deleteExtensionByPath(ext.path)
// await db.deleteExtensionByPath(ext.path)
}
}
return results

View File

@ -0,0 +1,9 @@
{
"tasks": {
"dev": "deno run --watch main.ts"
},
"imports": {
"@kunkun/api": "jsr:@kunkun/api@^0.0.40",
"@std/assert": "jsr:@std/assert@1"
}
}

View File

@ -0,0 +1,896 @@
{
"version": "4",
"specifiers": {
"jsr:@kunkun/api@^0.0.40": "0.0.40",
"jsr:@std/assert@1": "1.0.8",
"jsr:@std/internal@^1.0.5": "1.0.5",
"npm:@kksh/api@^0.0.40": "0.0.40",
"npm:@tauri-apps/api@^2.1.1": "2.1.1",
"npm:@tauri-apps/plugin-fs@^2.0.2": "2.0.2",
"npm:@tauri-apps/plugin-os@2": "2.0.0",
"npm:kkrpc@^0.0.12": "0.0.12_typescript@5.7.2",
"npm:lodash@^4.17.21": "4.17.21",
"npm:minimatch@^10.0.1": "10.0.1",
"npm:semver@^7.6.3": "7.6.3",
"npm:svelte-sonner@~0.3.28": "0.3.28_svelte@5.2.7__acorn@8.14.0",
"npm:tauri-api-adapter@~0.3.12": "0.3.13_typescript@5.7.2",
"npm:tauri-plugin-shellx-api@^2.0.14": "2.0.14",
"npm:valibot@0.40": "0.40.0_typescript@5.7.2"
},
"jsr": {
"@kunkun/api@0.0.40": {
"integrity": "eab67c01e1cc87f3e5e7f7613a302cba7fccb18a1745f1a5508cf48df1e3649e",
"dependencies": [
"npm:@kksh/api",
"npm:@tauri-apps/api",
"npm:@tauri-apps/plugin-fs",
"npm:@tauri-apps/plugin-os",
"npm:kkrpc",
"npm:lodash",
"npm:minimatch",
"npm:semver",
"npm:svelte-sonner",
"npm:tauri-api-adapter",
"npm:tauri-plugin-shellx-api",
"npm:valibot"
]
},
"@std/assert@1.0.8": {
"integrity": "ebe0bd7eb488ee39686f77003992f389a06c3da1bbd8022184804852b2fa641b",
"dependencies": [
"jsr:@std/internal"
]
},
"@std/internal@1.0.5": {
"integrity": "54a546004f769c1ac9e025abd15a76b6671ddc9687e2313b67376125650dc7ba"
}
},
"npm": {
"@ampproject/remapping@2.3.0": {
"integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==",
"dependencies": [
"@jridgewell/gen-mapping",
"@jridgewell/trace-mapping"
]
},
"@isaacs/cliui@8.0.2": {
"integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
"dependencies": [
"string-width@5.1.2",
"string-width-cjs@npm:string-width@4.2.3",
"strip-ansi@7.1.0",
"strip-ansi-cjs@npm:strip-ansi@6.0.1",
"wrap-ansi@8.1.0",
"wrap-ansi-cjs@npm:wrap-ansi@7.0.0"
]
},
"@jridgewell/gen-mapping@0.3.5": {
"integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==",
"dependencies": [
"@jridgewell/set-array",
"@jridgewell/sourcemap-codec",
"@jridgewell/trace-mapping"
]
},
"@jridgewell/resolve-uri@3.1.2": {
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="
},
"@jridgewell/set-array@1.2.1": {
"integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A=="
},
"@jridgewell/sourcemap-codec@1.5.0": {
"integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="
},
"@jridgewell/trace-mapping@0.3.25": {
"integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==",
"dependencies": [
"@jridgewell/resolve-uri",
"@jridgewell/sourcemap-codec"
]
},
"@kksh/api@0.0.40": {
"integrity": "sha512-g7n/vwGWs+5OMdxHUPOFKaX7vPqzTfXkRPTR33G+fWv6mdaypaS6fOAMnGmcoEgLPzL4RPO1WaL3+ypSCV620A==",
"dependencies": [
"@tauri-apps/api@2.1.1",
"@tauri-apps/cli",
"@tauri-apps/plugin-deep-link",
"@tauri-apps/plugin-dialog",
"@tauri-apps/plugin-fs",
"@tauri-apps/plugin-global-shortcut",
"@tauri-apps/plugin-http",
"@tauri-apps/plugin-log",
"@tauri-apps/plugin-notification",
"@tauri-apps/plugin-os",
"@tauri-apps/plugin-process",
"@tauri-apps/plugin-shell",
"@tauri-apps/plugin-store",
"@tauri-apps/plugin-updater",
"@tauri-apps/plugin-upload",
"kkrpc@0.0.10_typescript@5.7.2",
"lodash",
"minimatch@10.0.1",
"semver",
"svelte-sonner",
"tauri-api-adapter",
"tauri-plugin-network-api@2.0.4",
"tauri-plugin-shellx-api",
"tauri-plugin-system-info-api@2.0.8",
"valibot"
]
},
"@nodelib/fs.scandir@2.1.5": {
"integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
"dependencies": [
"@nodelib/fs.stat",
"run-parallel"
]
},
"@nodelib/fs.stat@2.0.5": {
"integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="
},
"@nodelib/fs.walk@1.2.8": {
"integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
"dependencies": [
"@nodelib/fs.scandir",
"fastq"
]
},
"@tauri-apps/api@2.0.1": {
"integrity": "sha512-eoQWT+Tq1qSwQpHV+nw1eNYe5B/nm1PoRjQCRiEOS12I1b+X4PUcREfXVX8dPcBT6GrzWGDtaecY0+1p0Rfqlw=="
},
"@tauri-apps/api@2.1.1": {
"integrity": "sha512-fzUfFFKo4lknXGJq8qrCidkUcKcH2UHhfaaCNt4GzgzGaW2iS26uFOg4tS3H4P8D6ZEeUxtiD5z0nwFF0UN30A=="
},
"@tauri-apps/cli-darwin-arm64@2.1.0": {
"integrity": "sha512-ESc6J6CE8hl1yKH2vJ+ALF+thq4Be+DM1mvmTyUCQObvezNCNhzfS6abIUd3ou4x5RGH51ouiANeT3wekU6dCw=="
},
"@tauri-apps/cli-darwin-x64@2.1.0": {
"integrity": "sha512-TasHS442DFs8cSH2eUQzuDBXUST4ECjCd0yyP+zZzvAruiB0Bg+c8A+I/EnqCvBQ2G2yvWLYG8q/LI7c87A5UA=="
},
"@tauri-apps/cli-linux-arm-gnueabihf@2.1.0": {
"integrity": "sha512-aP7ZBGNL4ny07Cbb6kKpUOSrmhcIK2KhjviTzYlh+pPhAptxnC78xQGD3zKQkTi2WliJLPmBYbOHWWQa57lQ9w=="
},
"@tauri-apps/cli-linux-arm64-gnu@2.1.0": {
"integrity": "sha512-ZTdgD5gLeMCzndMT2f358EkoYkZ5T+Qy6zPzU+l5vv5M7dHVN9ZmblNAYYXmoOuw7y+BY4X/rZvHV9pcGrcanQ=="
},
"@tauri-apps/cli-linux-arm64-musl@2.1.0": {
"integrity": "sha512-NzwqjUCilhnhJzusz3d/0i0F1GFrwCQbkwR6yAHUxItESbsGYkZRJk0yMEWkg3PzFnyK4cWTlQJMEU52TjhEzA=="
},
"@tauri-apps/cli-linux-x64-gnu@2.1.0": {
"integrity": "sha512-TyiIpMEtZxNOQmuFyfJwaaYbg3movSthpBJLIdPlKxSAB2BW0VWLY3/ZfIxm/G2YGHyREkjJvimzYE0i37PnMA=="
},
"@tauri-apps/cli-linux-x64-musl@2.1.0": {
"integrity": "sha512-/dQd0TlaxBdJACrR72DhynWftzHDaX32eBtS5WBrNJ+nnNb+znM3gON6nJ9tSE9jgDa6n1v2BkI/oIDtypfUXw=="
},
"@tauri-apps/cli-win32-arm64-msvc@2.1.0": {
"integrity": "sha512-NdQJO7SmdYqOcE+JPU7bwg7+odfZMWO6g8xF9SXYCMdUzvM2Gv/AQfikNXz5yS7ralRhNFuW32i5dcHlxh4pDg=="
},
"@tauri-apps/cli-win32-ia32-msvc@2.1.0": {
"integrity": "sha512-f5h8gKT/cB8s1ticFRUpNmHqkmaLutT62oFDB7N//2YTXnxst7EpMIn1w+QimxTvTk2gcx6EcW6bEk/y2hZGzg=="
},
"@tauri-apps/cli-win32-x64-msvc@2.1.0": {
"integrity": "sha512-P/+LrdSSb5Xbho1LRP4haBjFHdyPdjWvGgeopL96OVtrFpYnfC+RctB45z2V2XxqFk3HweDDxk266btjttfjGw=="
},
"@tauri-apps/cli@2.1.0": {
"integrity": "sha512-K2VhcKqBhAeS5pNOVdnR/xQRU6jwpgmkSL2ejHXcl0m+kaTggT0WRDQnFtPq6NljA7aE03cvwsbCAoFG7vtkJw==",
"dependencies": [
"@tauri-apps/cli-darwin-arm64",
"@tauri-apps/cli-darwin-x64",
"@tauri-apps/cli-linux-arm-gnueabihf",
"@tauri-apps/cli-linux-arm64-gnu",
"@tauri-apps/cli-linux-arm64-musl",
"@tauri-apps/cli-linux-x64-gnu",
"@tauri-apps/cli-linux-x64-musl",
"@tauri-apps/cli-win32-arm64-msvc",
"@tauri-apps/cli-win32-ia32-msvc",
"@tauri-apps/cli-win32-x64-msvc"
]
},
"@tauri-apps/plugin-deep-link@2.0.0": {
"integrity": "sha512-cDa2k1OrRU5DoKc0IXl1Y8RlFOU107u2phdZfT7FkApsC6TL/VAPs3YOUTT8p9/PZ50EjOKP104HFMqVqnQ0bw==",
"dependencies": [
"@tauri-apps/api@2.1.1"
]
},
"@tauri-apps/plugin-dialog@2.0.1": {
"integrity": "sha512-fnUrNr6EfvTqdls/ufusU7h6UbNFzLKvHk/zTuOiBq01R3dTODqwctZlzakdbfSp/7pNwTKvgKTAgl/NAP/Z0Q==",
"dependencies": [
"@tauri-apps/api@2.1.1"
]
},
"@tauri-apps/plugin-fs@2.0.2": {
"integrity": "sha512-4YZaX2j7ta81M5/DL8aN10kTnpUkEpkPo1FTYPT8Dd0ImHe3azM8i8MrtjrDGoyBYLPO3zFv7df/mSCYF8oA0Q==",
"dependencies": [
"@tauri-apps/api@2.1.1"
]
},
"@tauri-apps/plugin-global-shortcut@2.0.0": {
"integrity": "sha512-pnB4CUwFVjg4twtBSxoLJ4uLFTYxsvOdC1zIbG581pYzhYatOl6mjB+ijD5SSXgiS/jNoqMcfkOF9PWAisurew==",
"dependencies": [
"@tauri-apps/api@2.1.1"
]
},
"@tauri-apps/plugin-http@2.0.1": {
"integrity": "sha512-j6IA3pVBybSCwPpsihpX4z8bs6PluuGtr06ahL/xy4D8HunNBTmRmadJrFOQi0gOAbaig4MkQ15nzNLBLy8R1A==",
"dependencies": [
"@tauri-apps/api@2.1.1"
]
},
"@tauri-apps/plugin-log@2.0.0": {
"integrity": "sha512-C+NII9vzswqnOQE8k7oRtnaw0z5TZsMmnirRhXkCKDEhQQH9841Us/PC1WHtGiAaJ8za1A1JB2xXndEq/47X/w==",
"dependencies": [
"@tauri-apps/api@2.1.1"
]
},
"@tauri-apps/plugin-notification@2.0.0": {
"integrity": "sha512-6qEDYJS7mgXZWLXA0EFL+DVCJh8sJlzSoyw6B50pxhLPVFjc5Vr5DVzl5W3mUHaYhod5wsC984eQnlCCGqxYDA==",
"dependencies": [
"@tauri-apps/api@2.1.1"
]
},
"@tauri-apps/plugin-os@2.0.0": {
"integrity": "sha512-M7hG/nNyQYTJxVG/UhTKhp9mpXriwWzrs9mqDreB8mIgqA3ek5nHLdwRZJWhkKjZrnDT4v9CpA9BhYeplTlAiA==",
"dependencies": [
"@tauri-apps/api@2.1.1"
]
},
"@tauri-apps/plugin-process@2.0.0": {
"integrity": "sha512-OYzi0GnkrF4NAnsHZU7U3tjSoP0PbeAlO7T1Z+vJoBUH9sFQ1NSLqWYWQyf8hcb3gVWe7P1JggjiskO+LST1ug==",
"dependencies": [
"@tauri-apps/api@2.1.1"
]
},
"@tauri-apps/plugin-shell@2.0.1": {
"integrity": "sha512-akU1b77sw3qHiynrK0s930y8zKmcdrSD60htjH+mFZqv5WaakZA/XxHR3/sF1nNv9Mgmt/Shls37HwnOr00aSw==",
"dependencies": [
"@tauri-apps/api@2.1.1"
]
},
"@tauri-apps/plugin-store@2.1.0": {
"integrity": "sha512-GADqrc17opUKYIAKnGHIUgEeTZ2wJGu1ZITKQ1WMuOFdv8fvXRFBAqsqPjE3opgWohbczX6e1NpwmZK1AnuWVw==",
"dependencies": [
"@tauri-apps/api@2.1.1"
]
},
"@tauri-apps/plugin-updater@2.0.0": {
"integrity": "sha512-N0cl71g7RPr7zK2Fe5aoIwzw14NcdLcz7XMGFWZVjprsqgDRWoxbnUkknyCQMZthjhGkppCd/wN2MIsUz+eAhQ==",
"dependencies": [
"@tauri-apps/api@2.1.1"
]
},
"@tauri-apps/plugin-upload@2.1.0": {
"integrity": "sha512-nSIyxp2sAHsj+1RMs3obTP2lC1rzeWcMxHtzLYjnXLnJMXvWlFG0T7jEZ9Sg/OErvQxPVIvDW+12evuFKa5t8Q==",
"dependencies": [
"@tauri-apps/api@2.1.1"
]
},
"@types/estree@1.0.6": {
"integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw=="
},
"acorn-typescript@1.4.13_acorn@8.14.0": {
"integrity": "sha512-xsc9Xv0xlVfwp2o7sQ+GCQ1PgbkdcpWdTzrwXxO3xDMTAywVS3oXVOcOHuRjAPkS4P9b+yc/qNF15460v+jp4Q==",
"dependencies": [
"acorn"
]
},
"acorn@8.14.0": {
"integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="
},
"ansi-regex@5.0.1": {
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="
},
"ansi-regex@6.1.0": {
"integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="
},
"ansi-styles@4.3.0": {
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"dependencies": [
"color-convert"
]
},
"ansi-styles@6.2.1": {
"integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="
},
"anymatch@3.1.3": {
"integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
"dependencies": [
"normalize-path",
"picomatch"
]
},
"aria-query@5.3.2": {
"integrity": "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw=="
},
"array-union@2.1.0": {
"integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw=="
},
"axobject-query@4.1.0": {
"integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ=="
},
"balanced-match@1.0.2": {
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
},
"binary-extensions@2.3.0": {
"integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw=="
},
"brace-expansion@1.1.11": {
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dependencies": [
"balanced-match",
"concat-map"
]
},
"brace-expansion@2.0.1": {
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"dependencies": [
"balanced-match"
]
},
"braces@3.0.3": {
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dependencies": [
"fill-range"
]
},
"chokidar@3.6.0": {
"integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==",
"dependencies": [
"anymatch",
"braces",
"fsevents",
"glob-parent",
"is-binary-path",
"is-glob",
"normalize-path",
"readdirp"
]
},
"color-convert@2.0.1": {
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"dependencies": [
"color-name"
]
},
"color-name@1.1.4": {
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"commander@9.5.0": {
"integrity": "sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ=="
},
"concat-map@0.0.1": {
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
},
"cross-spawn@7.0.6": {
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
"dependencies": [
"path-key",
"shebang-command",
"which"
]
},
"dir-glob@3.0.1": {
"integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==",
"dependencies": [
"path-type"
]
},
"eastasianwidth@0.2.0": {
"integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA=="
},
"emoji-regex@8.0.0": {
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
},
"emoji-regex@9.2.2": {
"integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="
},
"esm-env@1.1.4": {
"integrity": "sha512-oO82nKPHKkzIj/hbtuDYy/JHqBHFlMIW36SDiPCVsj87ntDLcWN+sJ1erdVryd4NxODacFTsdrIE3b7IamqbOg=="
},
"esrap@1.2.2": {
"integrity": "sha512-F2pSJklxx1BlQIQgooczXCPHmcWpn6EsP5oo73LQfonG9fIlIENQ8vMmfGXeojP9MrkzUNAfyU5vdFlR9shHAw==",
"dependencies": [
"@jridgewell/sourcemap-codec",
"@types/estree"
]
},
"fast-glob@3.3.2": {
"integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==",
"dependencies": [
"@nodelib/fs.stat",
"@nodelib/fs.walk",
"glob-parent",
"merge2",
"micromatch"
]
},
"fastq@1.17.1": {
"integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==",
"dependencies": [
"reusify"
]
},
"fill-range@7.1.1": {
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dependencies": [
"to-regex-range"
]
},
"foreground-child@3.3.0": {
"integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==",
"dependencies": [
"cross-spawn",
"signal-exit"
]
},
"fs.realpath@1.0.0": {
"integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="
},
"fsevents@2.3.3": {
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="
},
"function-bind@1.1.2": {
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="
},
"glob-parent@5.1.2": {
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
"dependencies": [
"is-glob"
]
},
"glob@11.0.0": {
"integrity": "sha512-9UiX/Bl6J2yaBbxKoEBRm4Cipxgok8kQYcOPEhScPwebu2I0HoQOuYdIO6S3hLuWoZgpDpwQZMzTFxgpkyT76g==",
"dependencies": [
"foreground-child",
"jackspeak",
"minimatch@10.0.1",
"minipass",
"package-json-from-dist",
"path-scurry"
]
},
"glob@7.2.3": {
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
"dependencies": [
"fs.realpath",
"inflight",
"inherits",
"minimatch@3.1.2",
"once",
"path-is-absolute"
]
},
"globby@11.1.0": {
"integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==",
"dependencies": [
"array-union",
"dir-glob",
"fast-glob",
"ignore",
"merge2",
"slash"
]
},
"hasown@2.0.2": {
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
"dependencies": [
"function-bind"
]
},
"ignore@5.3.2": {
"integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="
},
"inflight@1.0.6": {
"integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
"dependencies": [
"once",
"wrappy"
]
},
"inherits@2.0.4": {
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"interpret@1.4.0": {
"integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA=="
},
"is-binary-path@2.1.0": {
"integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
"dependencies": [
"binary-extensions"
]
},
"is-core-module@2.15.1": {
"integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==",
"dependencies": [
"hasown"
]
},
"is-extglob@2.1.1": {
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="
},
"is-fullwidth-code-point@3.0.0": {
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="
},
"is-glob@4.0.3": {
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
"dependencies": [
"is-extglob"
]
},
"is-number@7.0.0": {
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="
},
"is-reference@3.0.3": {
"integrity": "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==",
"dependencies": [
"@types/estree"
]
},
"isexe@2.0.0": {
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="
},
"jackspeak@4.0.2": {
"integrity": "sha512-bZsjR/iRjl1Nk1UkjGpAzLNfQtzuijhn2g+pbZb98HQ1Gk8vM9hfbxeMBP+M2/UUdwj0RqGG3mlvk2MsAqwvEw==",
"dependencies": [
"@isaacs/cliui"
]
},
"kkrpc@0.0.10_typescript@5.7.2": {
"integrity": "sha512-lkQKVnN9f6JrS4ybKbGkV4mtuGhWYLTnaWx60ysytEap+sP5jcTbAuJlSrY6JqlwaohiS0X3ZbvJ2rAXYRdTng==",
"dependencies": [
"typescript",
"ws"
]
},
"kkrpc@0.0.12_typescript@5.7.2": {
"integrity": "sha512-PBk4AhGfkesIdAwmIoj7dHHIp7qN97XT4yr5Rl7h2WL79gxWQVgZRJYLt7Gb17GoLDh991rnL85mhCoPG5VC/Q==",
"dependencies": [
"typescript",
"ws"
]
},
"locate-character@3.0.0": {
"integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA=="
},
"lodash@4.17.21": {
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"lru-cache@11.0.2": {
"integrity": "sha512-123qHRfJBmo2jXDbo/a5YOQrJoHF/GNQTLzQ5+IdK5pWpceK17yRc6ozlWd25FxvGKQbIUs91fDFkXmDHTKcyA=="
},
"magic-string@0.30.13": {
"integrity": "sha512-8rYBO+MsWkgjDSOvLomYnzhdwEG51olQ4zL5KXnNJWV5MNmrb4rTZdrtkhxjnD/QyZUqR/Z/XDsUs/4ej2nx0g==",
"dependencies": [
"@jridgewell/sourcemap-codec"
]
},
"merge2@1.4.1": {
"integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="
},
"micromatch@4.0.8": {
"integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
"dependencies": [
"braces",
"picomatch"
]
},
"minimatch@10.0.1": {
"integrity": "sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==",
"dependencies": [
"brace-expansion@2.0.1"
]
},
"minimatch@3.1.2": {
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dependencies": [
"brace-expansion@1.1.11"
]
},
"minimist@1.2.8": {
"integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="
},
"minipass@7.1.2": {
"integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="
},
"mylas@2.1.13": {
"integrity": "sha512-+MrqnJRtxdF+xngFfUUkIMQrUUL0KsxbADUkn23Z/4ibGg192Q+z+CQyiYwvWTsYjJygmMR8+w3ZDa98Zh6ESg=="
},
"normalize-path@3.0.0": {
"integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA=="
},
"once@1.4.0": {
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
"dependencies": [
"wrappy"
]
},
"package-json-from-dist@1.0.1": {
"integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw=="
},
"path-is-absolute@1.0.1": {
"integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="
},
"path-key@3.1.1": {
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="
},
"path-parse@1.0.7": {
"integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw=="
},
"path-scurry@2.0.0": {
"integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==",
"dependencies": [
"lru-cache",
"minipass"
]
},
"path-type@4.0.0": {
"integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw=="
},
"picomatch@2.3.1": {
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="
},
"plimit-lit@1.6.1": {
"integrity": "sha512-B7+VDyb8Tl6oMJT9oSO2CW8XC/T4UcJGrwOVoNGwOQsQYhlpfajmrMj5xeejqaASq3V/EqThyOeATEOMuSEXiA==",
"dependencies": [
"queue-lit"
]
},
"queue-lit@1.5.2": {
"integrity": "sha512-tLc36IOPeMAubu8BkW8YDBV+WyIgKlYU7zUNs0J5Vk9skSZ4JfGlPOqplP0aHdfv7HL0B2Pg6nwiq60Qc6M2Hw=="
},
"queue-microtask@1.2.3": {
"integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="
},
"readdirp@3.6.0": {
"integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
"dependencies": [
"picomatch"
]
},
"rechoir@0.6.2": {
"integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==",
"dependencies": [
"resolve"
]
},
"resolve@1.22.8": {
"integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==",
"dependencies": [
"is-core-module",
"path-parse",
"supports-preserve-symlinks-flag"
]
},
"reusify@1.0.4": {
"integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw=="
},
"rimraf@6.0.1": {
"integrity": "sha512-9dkvaxAsk/xNXSJzMgFqqMCuFgt2+KsOFek3TMLfo8NCPfWpBmqwyNn5Y+NX56QUYfCtsyhF3ayiboEoUmJk/A==",
"dependencies": [
"glob@11.0.0",
"package-json-from-dist"
]
},
"run-parallel@1.2.0": {
"integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
"dependencies": [
"queue-microtask"
]
},
"semver@7.6.3": {
"integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A=="
},
"shebang-command@2.0.0": {
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
"dependencies": [
"shebang-regex"
]
},
"shebang-regex@3.0.0": {
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="
},
"shelljs@0.8.5": {
"integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==",
"dependencies": [
"glob@7.2.3",
"interpret",
"rechoir"
]
},
"shx@0.3.4": {
"integrity": "sha512-N6A9MLVqjxZYcVn8hLmtneQWIJtp8IKzMP4eMnx+nqkvXoqinUPCbUFLp2UcWTEIUONhlk0ewxr/jaVGlc+J+g==",
"dependencies": [
"minimist",
"shelljs"
]
},
"signal-exit@4.1.0": {
"integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="
},
"slash@3.0.0": {
"integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q=="
},
"string-width@4.2.3": {
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"dependencies": [
"emoji-regex@8.0.0",
"is-fullwidth-code-point",
"strip-ansi@6.0.1"
]
},
"string-width@5.1.2": {
"integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
"dependencies": [
"eastasianwidth",
"emoji-regex@9.2.2",
"strip-ansi@7.1.0"
]
},
"strip-ansi@6.0.1": {
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dependencies": [
"ansi-regex@5.0.1"
]
},
"strip-ansi@7.1.0": {
"integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==",
"dependencies": [
"ansi-regex@6.1.0"
]
},
"supports-preserve-symlinks-flag@1.0.0": {
"integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w=="
},
"svelte-sonner@0.3.28_svelte@5.2.7__acorn@8.14.0": {
"integrity": "sha512-K3AmlySeFifF/cKgsYNv5uXqMVNln0NBAacOYgmkQStLa/UoU0LhfAACU6Gr+YYC8bOCHdVmFNoKuDbMEsppJg==",
"dependencies": [
"svelte"
]
},
"svelte@5.2.7_acorn@8.14.0": {
"integrity": "sha512-cEhPGuLHiH2+Z8B1FwQgiZJgA39uUmJR4516TKrM5zrp0/cuwJkfhUfcTxhAkznanAF5fXUKzvYR4o+Ksx3ZCQ==",
"dependencies": [
"@ampproject/remapping",
"@jridgewell/sourcemap-codec",
"@types/estree",
"acorn",
"acorn-typescript",
"aria-query",
"axobject-query",
"esm-env",
"esrap",
"is-reference",
"locate-character",
"magic-string",
"zimmerframe"
]
},
"tauri-api-adapter@0.3.13_typescript@5.7.2": {
"integrity": "sha512-ex4z3Zish6by1ew1ialbLc5g3dhly+6ihepJeJwVRQoMJdUTyfCX2dVPYW74i+px1hFXKFu3MYTheS7uBOeCbg==",
"dependencies": [
"@tauri-apps/api@2.1.1",
"@tauri-apps/plugin-dialog",
"@tauri-apps/plugin-fs",
"@tauri-apps/plugin-http",
"@tauri-apps/plugin-log",
"@tauri-apps/plugin-notification",
"@tauri-apps/plugin-os",
"@tauri-apps/plugin-shell",
"@tauri-apps/plugin-upload",
"kkrpc@0.0.12_typescript@5.7.2",
"rimraf",
"shx",
"tauri-plugin-clipboard-api",
"tauri-plugin-network-api@2.0.4_typescript@5.7.2",
"tauri-plugin-shellx-api",
"tauri-plugin-system-info-api@2.0.8_typescript@5.7.2",
"tsc-alias",
"typescript",
"valibot"
]
},
"tauri-plugin-clipboard-api@2.1.11_typescript@5.7.2": {
"integrity": "sha512-VNkGaVPPfRoHg7/rJBcWqsvLvn4/kNEOOlzqwyI9Qdf6g54B3mc31GLZdnq/HWtX0vZskw3J8b/EF9YkASDCBQ==",
"dependencies": [
"@tauri-apps/api@2.0.1",
"valibot"
]
},
"tauri-plugin-network-api@2.0.4": {
"integrity": "sha512-CJWF2g+uQifcIlE/AXUnezVjjbyY0FDBxoz4P6BmjNRR/qubpNMfdUnKLqdjX98o5MIXGW+UnyZTfbJo998dFw==",
"dependencies": [
"@tauri-apps/api@2.1.1",
"valibot"
]
},
"tauri-plugin-network-api@2.0.4_typescript@5.7.2": {
"integrity": "sha512-CJWF2g+uQifcIlE/AXUnezVjjbyY0FDBxoz4P6BmjNRR/qubpNMfdUnKLqdjX98o5MIXGW+UnyZTfbJo998dFw==",
"dependencies": [
"@tauri-apps/api@2.1.1",
"valibot"
]
},
"tauri-plugin-shellx-api@2.0.14": {
"integrity": "sha512-MdSYD2KDw63b7yEIa9Q2GXnbidL5Tk+s92BJX0XvYfHrv2l1fYE2vdRWGnyhvCWmUavyCeiOle5uMxM6QLOb2Q==",
"dependencies": [
"@tauri-apps/api@2.1.1"
]
},
"tauri-plugin-system-info-api@2.0.8": {
"integrity": "sha512-EFdLXNGp6Zu9SNsZCkU+55A8027OnrVw/TQrd0oJHgfZzs4qvm1iMmSvyid4MLftt33iZDhjCzxYijaaOxeKSg==",
"dependencies": [
"@tauri-apps/api@2.1.1",
"valibot"
]
},
"tauri-plugin-system-info-api@2.0.8_typescript@5.7.2": {
"integrity": "sha512-EFdLXNGp6Zu9SNsZCkU+55A8027OnrVw/TQrd0oJHgfZzs4qvm1iMmSvyid4MLftt33iZDhjCzxYijaaOxeKSg==",
"dependencies": [
"@tauri-apps/api@2.1.1",
"valibot"
]
},
"to-regex-range@5.0.1": {
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dependencies": [
"is-number"
]
},
"tsc-alias@1.8.10": {
"integrity": "sha512-Ibv4KAWfFkFdKJxnWfVtdOmB0Zi1RJVxcbPGiCDsFpCQSsmpWyuzHG3rQyI5YkobWwxFPEyQfu1hdo4qLG2zPw==",
"dependencies": [
"chokidar",
"commander",
"globby",
"mylas",
"normalize-path",
"plimit-lit"
]
},
"typescript@5.7.2": {
"integrity": "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg=="
},
"valibot@0.40.0_typescript@5.7.2": {
"integrity": "sha512-XHKnaVtwHqxPwnGOsLrwka9CEaL7yNeLNp707OKv/bmT29GnPVdl6PxBOZ6BW7hF66/6QT6iVbOlnW7qVPmoKw==",
"dependencies": [
"typescript"
]
},
"which@2.0.2": {
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
"dependencies": [
"isexe"
]
},
"wrap-ansi@7.0.0": {
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
"dependencies": [
"ansi-styles@4.3.0",
"string-width@4.2.3",
"strip-ansi@6.0.1"
]
},
"wrap-ansi@8.1.0": {
"integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
"dependencies": [
"ansi-styles@6.2.1",
"string-width@5.1.2",
"strip-ansi@7.1.0"
]
},
"wrappy@1.0.2": {
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
},
"ws@8.18.0": {
"integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw=="
},
"zimmerframe@1.1.2": {
"integrity": "sha512-rAbqEGa8ovJy4pyBxZM70hg4pE6gDgaQ0Sl9M3enG3I0d6H4XSAM3GeNGLKnsBpuijUow064sf7ww1nutC5/3w=="
}
},
"workspace": {
"dependencies": [
"jsr:@kunkun/api@^0.0.40",
"jsr:@std/assert@1"
]
}
}

View File

@ -0,0 +1,5 @@
import { expose } from '@kunkun/api/runtime/deno';
expose({
echo: (paths: string[]) => Promise.resolve(paths)
});

View File

@ -16,7 +16,15 @@
"permissions": [
"clipboard:read-text",
"notification:all",
"dialog:all"
"dialog:all",
"shell:kill",
"shell:stdin-write",
{
"permission": "shell:deno:spawn",
"allow": [
{}
]
}
],
"customUiCmds": [
{

View File

@ -1,6 +1,6 @@
<script lang="ts">
import { base } from '$app/paths';
import { clipboard, notification, ui, toast, dialog } from '@kksh/api/ui/iframe';
import { clipboard, notification, ui, toast, dialog, shell } from '@kksh/api/ui/iframe';
import {
ModeToggle,
Button,
@ -27,8 +27,61 @@
console.error(err);
});
}
async function testKkrpc() {
const { rpcChannel, process, command } = await shell.createDenoRpcChannel<
{},
{
echo: (paths: string[]) => Promise<string[]>;
}
>('$EXTENSION/deno-src/index.ts', [], {}, {});
command.stderr.on('data', (data) => {
console.log('stderr', data);
});
// command.stdout.on('data', (data) => {
// console.log('stdout', data);
// });
const api = rpcChannel.getAPI();
await api
.echo([
'/Users/hk/Desktop/_DSC2594.ARW',
'/Users/hk/Desktop/_DSC2597.ARW',
'/Users/hk/Desktop/_DSC2598.ARW',
'/Users/hk/Desktop/DJI_20241128180028_0198_D.JPG',
'/Users/hk/Desktop/_DSC2594.ARW',
'/Users/hk/Desktop/_DSC2597.ARW',
'/Users/hk/Desktop/_DSC2598.ARW',
'/Users/hk/Desktop/DJI_20241128180028_0198_D.JPG',
'/Users/hk/Desktop/_DSC2594.ARW',
'/Users/hk/Desktop/_DSC2597.ARW',
'/Users/hk/Desktop/_DSC2598.ARW',
'/Users/hk/Desktop/DJI_20241128180028_0198_D.JPG',
'/Users/hk/Desktop/_DSC2594.ARW',
'/Users/hk/Desktop/_DSC2597.ARW',
'/Users/hk/Desktop/_DSC2598.ARW',
'/Users/hk/Desktop/DJI_20241128180028_0198_D.JPG',
'/Users/hk/Desktop/_DSC2594.ARW',
'/Users/hk/Desktop/_DSC2597.ARW',
'/Users/hk/Desktop/_DSC2598.ARW',
'/Users/hk/Desktop/DJI_20241128180028_0198_D.JPG',
'/Users/hk/Desktop/_DSC2594.ARW',
'/Users/hk/Desktop/_DSC2597.ARW',
'/Users/hk/Desktop/_DSC2598.ARW',
'/Users/hk/Desktop/DJI_20241128180028_0198_D.JPG',
'/Users/hk/Desktop/_DSC2594.ARW',
'/Users/hk/Desktop/_DSC2597.ARW',
'/Users/hk/Desktop/_DSC2598.ARW',
'/Users/hk/Desktop/DJI_20241128180028_0198_D.JPG'
])
.then(console.log)
.catch(console.error);
process.kill();
}
</script>
<div class="container mt-16">
<div data-kunkun-drag-region class="h-12"></div>
<div class="container">
<Button onclick={showDialog}>Show Dialog</Button>
<Button onclick={testKkrpc}>Test kkrpc</Button>
</div>
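Stripped of the hard-coded test paths, the round trip above boils down to two small typed halves. A minimal sketch, grounded in the demo extension's deno-src/index.ts and the testKkrpc function shown in this diff (error handling omitted):

```ts
// deno-src/index.ts — the Deno side exposes a typed API object over stdio.
import { expose } from "@kunkun/api/runtime/deno"
expose({ echo: (paths: string[]) => Promise.resolve(paths) })

// iframe side — spawn the Deno process and obtain a typed proxy to that API.
import { shell } from "@kksh/api/ui/iframe"

type DenoApi = { echo: (paths: string[]) => Promise<string[]> }

const { rpcChannel, process } = await shell.createDenoRpcChannel<{}, DenoApi>(
  "$EXTENSION/deno-src/index.ts",
  [],
  {},
  {}
)
const api = rpcChannel.getAPI()
console.log(await api.echo(["/tmp/a.txt"])) // round-trips through the Deno process
process.kill()
```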

177
packages/grpc/.gitignore vendored Normal file
View File

@ -0,0 +1,177 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
_.log
npm-debug.log_
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Caches
.cache
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
# Runtime data
pids
_.pid
_.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
# IntelliJ based IDEs
.idea
# Finder (MacOS) folder config
.DS_Store
src/protos

16
packages/grpc/Cargo.toml Normal file
View File

@ -0,0 +1,16 @@
[package]
name = "grpc"
version = "0.1.0"
edition = "2021"
[dependencies]
serde = { workspace = true }
serde_json = { workspace = true }
tonic = { version = "0.11", features = ["tls"] }
prost = "0.12"
uuid = { version = "1.8.0", features = ["v4"] }
anyhow = { workspace = true }
# prost-types = { version = "0.13", features = ["serde"] }
[build-dependencies]
tonic-build = "0.11"

14
packages/grpc/build.rs Normal file
View File

@ -0,0 +1,14 @@
fn main() {
let out_dir = std::path::PathBuf::from(std::env::var("OUT_DIR").unwrap());
tonic_build::configure()
.type_attribute(
"file_transfer.FileNode",
"#[derive(serde::Serialize, serde::Deserialize)] #[serde(rename_all = \"camelCase\")]",
)
.file_descriptor_set_path(out_dir.join("kk_grpc.bin"))
.compile(
&["./protos/file-transfer.proto", "./protos/kunkun.proto"],
&["./protos"],
)
.expect("Failed to compile protos");
}

44
packages/grpc/build.ts Normal file
View File

@ -0,0 +1,44 @@
import fs from "fs"
import os from "os"
import path from "path"
import { fileURLToPath } from "url"
import { $ } from "bun"
// skip this if on Windows, protoc-gen-ts is not supported
if (os.platform() === "win32") {
console.log("Skipping build on Windows")
process.exit(0)
}
console.log("process.env.CF_PAGES", Bun.env.CF_PAGES)
console.log("process.env.CF_PAGES_URL", Bun.env.CF_PAGES_URL)
console.log("process.env.CF_PAGES_BRANCH", Bun.env.CF_PAGES_BRANCH)
console.log("process.env.CF_PAGES_COMMIT_SHA", Bun.env.CF_PAGES_COMMIT_SHA)
if (Bun.env.CF_PAGES_URL) {
console.log("Skipping build in Cloudflare Pages, as cloudflare pages does not have protoc")
process.exit(0)
}
const filepath = fileURLToPath(import.meta.url)
const __dirname = path.dirname(filepath)
const srcPath = path.join(__dirname, "src")
if (!fs.existsSync(srcPath)) {
fs.mkdirSync(srcPath)
}
fs.rmSync(path.join(__dirname, "src/protos"), { recursive: true, force: true })
const protosDir = path.join(__dirname, "protos")
for (const file of fs.readdirSync(protosDir)) {
if (file.endsWith(".proto")) {
try {
await $`
protoc \
--plugin=protoc-gen-ts=./node_modules/.bin/protoc-gen-ts \
--ts_out=./src \
-I . \
./protos/${file}`
} catch (error) {
console.error(error)
}
}
}

39
packages/grpc/dev.ts Normal file
View File

@ -0,0 +1,39 @@
import fs from "fs"
import path from "path"
import tls from "tls"
import { fileURLToPath } from "url"
import grpc from "@grpc/grpc-js"
import * as kk from "./src/protos/kunkun"
// Create channel credentials with SSL verification disabled
const filepath = fileURLToPath(import.meta.url)
const __dirname = path.dirname(filepath)
const certPath = path.join(__dirname, "../tauri-plugins/jarvis/self_signed_certs/cert.pem")
const credentials = grpc.credentials.createSsl(fs.readFileSync(certPath))
// const client = new kk.kunkun.KunkunClient("localhost:9559", credentials) // trust SSL cert
// const originalCreateSecureContext = tls.createSecureContext
// tls.createSecureContext = (options) => {
// if (options && options.rejectUnauthorized === false) {
// options.checkServerIdentity = () => undefined // Skip validation
// }
// return originalCreateSecureContext(options)
// }
const insecureSslCredentials = grpc.credentials.createSsl(
null, // Pass `null` to skip validation
null, // Client key (not used here)
null // Client certificate (not used here)
)
const client = new kk.kunkun.KunkunClient("localhost:9559", insecureSslCredentials) // skip SSL cert validation
client.ServerInfo(new kk.kunkun.Empty(), (err, response) => {
console.log(response)
console.log("public_key", response?.public_key)
console.log("ssl_cert", response?.ssl_cert)
if (err) {
console.error(err)
}
})
// To trust any SSL cert, set NODE_TLS_REJECT_UNAUTHORIZED='0'

0
packages/grpc/index.ts Normal file
View File

View File

@ -0,0 +1,23 @@
{
"name": "@kksh/grpc",
"module": "index.ts",
"type": "module",
"scripts": {
"prepare": "bun build.ts",
"build": "bun build.ts"
},
"devDependencies": {
"@grpc/proto-loader": "^0.7.13",
"@types/bun": "latest",
"@types/google-protobuf": "^3.15.12",
"protoc-gen-ts": "^0.8.7",
"ts-proto": "^2.3.0"
},
"peerDependencies": {
"typescript": "^5.0.0"
},
"dependencies": {
"@grpc/grpc-js": "^1.12.2",
"google-protobuf": "^3.21.4"
}
}

View File

@ -1,25 +1,30 @@
syntax = "proto3";
package file_transfer;
service FileTransfer {
rpc StartTransfer (StartTransferRequest) returns (StartTransferResponse);
rpc SendTransferInfo(TransferInfo) returns (SendTransferInfoResponse);
}
enum FileType {
FILE = 0;
DIRECTORY = 1;
}
message FileNode {
string filename = 1;
uint64 file_size = 2;
string id = 3;
FileType type = 4;
repeated FileNode children = 5;
}
message StartTransferRequest {
string ssl_cert = 1;
string port = 1;
string code = 2;
string ssl_cert = 3;
FileNode root = 4;
}
message StartTransferResponse {
string port = 1;
// bool accept = 1;
}
message TransferInfo {
string filename = 1;
string code = 2;
int32 port = 3;
}
message SendTransferInfoResponse {}
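The new FileTransfer service pairs with this crate's TypeScript client the same way kunkun.proto pairs with dev.ts. A hedged sketch of initiating a transfer from TypeScript — the address, file paths, and the initializer-object message constructors are assumptions about the protoc-gen-ts output, not code from this commit:

```ts
import fs from "fs"
import grpc from "@grpc/grpc-js"
import * as ft from "./src/protos/file-transfer"

// Pin the receiver's self-signed cert, mirroring the dev.ts pattern.
const credentials = grpc.credentials.createSsl(fs.readFileSync("cert.pem"))
const client = new ft.file_transfer.FileTransferClient("192.168.1.20:9559", credentials)

// A single-file tree; field names follow the proto definitions above.
const root = new ft.file_transfer.FileNode({
  filename: "demo.txt",
  file_size: 4,
  id: "file-id-1",
  type: ft.file_transfer.FileType.FILE,
  children: []
})

client.StartTransfer(
  new ft.file_transfer.StartTransferRequest({
    port: "9559",
    code: "one-time-code",
    ssl_cert: fs.readFileSync("my-cert.pem", "utf8"),
    root
  }),
  (err, response) => {
    if (err) console.error(err)
    else console.log("receiver acknowledged transfer", response)
  }
)
```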

View File

@ -0,0 +1,16 @@
syntax = "proto3";
package kunkun;
service Kunkun {
rpc ServerInfo (Empty) returns (ServerInfoResponse);
rpc HMR (Empty) returns (Empty);
}
message Empty {}
message ServerInfoResponse {
string service_name = 1;
string service_version = 2;
string public_key = 3;
string ssl_cert = 4; // self-signed SSL cert that other hosts need to trust in order to set up a secure connection
}

2
packages/grpc/src/lib.rs Normal file
View File

@ -0,0 +1,2 @@
mod mods;
pub use mods::{file_transfer::file_transfer, kunkun::kunkun};

View File

@ -0,0 +1,8 @@
use grpc;
fn main() {
println!(
"file_transfer: {:?}",
grpc::file_transfer::FILE_DESCRIPTOR_SET
);
}

View File

@ -0,0 +1,4 @@
pub mod file_transfer {
tonic::include_proto!("file_transfer"); // The string specified here must match the proto package name
pub const FILE_DESCRIPTOR_SET: &[u8] = tonic::include_file_descriptor_set!("kk_grpc");
}

View File

@ -0,0 +1,4 @@
pub mod kunkun {
tonic::include_proto!("kunkun"); // The string specified here must match the proto package name
pub const FILE_DESCRIPTOR_SET: &[u8] = tonic::include_file_descriptor_set!("kk_grpc");
}

View File

@ -0,0 +1,2 @@
pub mod file_transfer;
pub mod kunkun;

View File

@ -0,0 +1,27 @@
{
"compilerOptions": {
// Enable latest features
"lib": ["ESNext", "DOM"],
"target": "ESNext",
"module": "ESNext",
"moduleDetection": "force",
"jsx": "react-jsx",
"allowJs": true,
// Bundler mode
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"noEmit": true,
// Best practices
"strict": true,
"skipLibCheck": true,
"noFallthroughCasesInSwitch": true,
// Some stricter flags (disabled by default)
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false
}
}

View File

@ -1,10 +1,8 @@
#![cfg(target_os = "macos")]
use localauthentication_rs::{LAPolicy, LocalAuthentication};
use objc::runtime::{Object, BOOL, NO, YES};
use objc::{msg_send, sel, sel_impl};
use objc::runtime::{BOOL, YES};
use serde::{Deserialize, Serialize};
use std::ptr;
#[link(name = "CoreGraphics", kind = "framework")]
extern "C" {

View File

@ -24,6 +24,6 @@
"dependencies": {
"@aws-sdk/client-s3": "^3.583.0",
"@kksh/api": "workspace:*",
"valibot": "^0.40.0"
"valibot": "^1.0.0-beta.9"
}
}

View File

@ -4,9 +4,9 @@ import {
PutObjectCommand,
S3Client
} from "@aws-sdk/client-s3"
import { toJSONSchema } from "@gcornut/valibot-json-schema"
import { ExtPackageJson } from "@kksh/api/models"
import { parse, string } from "valibot"
import * as v from "valibot"
import { getJsonSchema } from "../src"
const s3Client = new S3Client({

View File

@ -1,6 +1,7 @@
import { ExtPackageJson } from "@kksh/api/models"
import { createSB } from "@kksh/supabase"
import { parse, string } from "valibot"
import * as v from "valibot"
import { getJsonSchema } from "../src"
const supabase = createSB(

View File

@ -1,5 +1,6 @@
import { toJsonSchema } from "@valibot/to-json-schema"
import * as v from "valibot"
export function getJsonSchema(schema: any) {
return JSON.stringify(toJsonSchema(schema), null, 2)
export function getJsonSchema(schema: v.ObjectSchema<any, any>) {
return JSON.stringify(toJsonSchema(v.objectWithRest(schema.entries, v.any())), null, 2)
}
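A small illustration of what the objectWithRest wrapper buys: the emitted JSON schema keeps the declared manifest fields but should tolerate any extra keys a real package.json carries (scripts, devDependencies, and so on). The toy Manifest schema and the relative import path below are assumptions for illustration only:

```ts
import * as v from "valibot"
import { getJsonSchema } from "../src" // hypothetical path, mirroring the scripts above

// Hypothetical minimal manifest schema.
const Manifest = v.object({ name: v.string(), version: v.string() })

// getJsonSchema wraps Manifest.entries in objectWithRest(..., v.any()), so the
// resulting JSON schema should accept additional, undeclared properties instead
// of rejecting a package.json that contains unrelated fields.
console.log(getJsonSchema(Manifest))
```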

View File

@ -22,28 +22,39 @@ tauri-plugin-store = "2.0.1"
axum = { workspace = true }
axum-extra = { workspace = true }
axum-server = { workspace = true }
tower = { version = "0.4", features = ["util"] }
tower-http = { version = "0.4.0", features = ["fs", "trace", "cors"] }
tonic = "0.11"
tower = { workspace = true }
tower-http = { workspace = true }
tonic = { version = "0.11", features = ["tls"] }
tonic-reflection = "0.11.0"
uuid = { version = "1.8.0", features = ["v4"] }
prost = "0.12"
mime_guess = "2.0.5"
tokio = { workspace = true }
tokio-stream = "0.1.16"
tokio-util = { workspace = true }
mdns-sd = { workspace = true }
reqwest = { workspace = true }
tauri-plugin-network = { workspace = true }
db = { workspace = true }
rustls = { workspace = true }
tauri-plugin-clipboard = { workspace = true }
tauri-plugin-upload = { workspace = true }
log = { workspace = true }
strum = { workspace = true }
chrono = { workspace = true }
strum_macros = { workspace = true }
mac-security-rs = { workspace = true }
sysinfo = { workspace = true }
openssl = { workspace = true }
zip = "1.1.4"
rust_search = "2.1.0"
plist = "1.7.0"
crypto = { workspace = true }
base64 = { workspace = true }
obfstr = { workspace = true }
grpc = { workspace = true }
futures-util = "0.3.31"
rayon = { workspace = true }
[target.'cfg(target_os = "macos")'.dependencies]
tauri-icns = "0.1.0"

View File

@ -114,6 +114,14 @@ const COMMANDS: &[&str] = &[
/* MDNS */
/* -------------------------------------------------------------------------- */
"get_peers",
/* -------------------------------------------------------------------------- */
/* File Transfer */
/* -------------------------------------------------------------------------- */
"get_file_transfer_bucket_keys",
"get_file_transfer_bucket_by_key",
"local_net_send_file",
"download_files",
"file_transfer_preview_bucket",
];
fn main() {
@ -141,8 +149,11 @@ fn main() {
tonic_build::configure()
.file_descriptor_set_path(out_dir.join("kk_grpc.bin"))
.compile(
&["proto/helloworld.proto", "proto/file-transfer.proto"],
&["proto"],
&[
"../../grpc/protos/file-transfer.proto",
"../../grpc/protos/kunkun.proto",
],
&["../../grpc/protos"],
)
.expect("Failed to compile protos");

View File

@ -90,4 +90,10 @@ commands.allow = [
"request_screen_capture_access",
"check_screen_capture_access",
"get_peers",
# File Transfer
"get_file_transfer_bucket_keys",
"get_file_transfer_bucket_by_key",
"local_net_send_file",
"download_files",
"file_transfer_preview_bucket",
]

View File

@ -0,0 +1,13 @@
# Automatically generated - DO NOT EDIT!
"$schema" = "../../schemas/schema.json"
[[permission]]
identifier = "allow-download-file"
description = "Enables the download_file command without any pre-configured scope."
commands.allow = ["download_file"]
[[permission]]
identifier = "deny-download-file"
description = "Denies the download_file command without any pre-configured scope."
commands.deny = ["download_file"]

View File

@ -0,0 +1,13 @@
# Automatically generated - DO NOT EDIT!
"$schema" = "../../schemas/schema.json"
[[permission]]
identifier = "allow-download-files"
description = "Enables the download_files command without any pre-configured scope."
commands.allow = ["download_files"]
[[permission]]
identifier = "deny-download-files"
description = "Denies the download_files command without any pre-configured scope."
commands.deny = ["download_files"]

View File

@ -0,0 +1,13 @@
# Automatically generated - DO NOT EDIT!
"$schema" = "../../schemas/schema.json"
[[permission]]
identifier = "allow-file-transfer-preview-bucket"
description = "Enables the file_transfer_preview_bucket command without any pre-configured scope."
commands.allow = ["file_transfer_preview_bucket"]
[[permission]]
identifier = "deny-file-transfer-preview-bucket"
description = "Denies the file_transfer_preview_bucket command without any pre-configured scope."
commands.deny = ["file_transfer_preview_bucket"]

View File

@ -0,0 +1,13 @@
# Automatically generated - DO NOT EDIT!
"$schema" = "../../schemas/schema.json"
[[permission]]
identifier = "allow-get-file-transfer-bucket-by-key"
description = "Enables the get_file_transfer_bucket_by_key command without any pre-configured scope."
commands.allow = ["get_file_transfer_bucket_by_key"]
[[permission]]
identifier = "deny-get-file-transfer-bucket-by-key"
description = "Denies the get_file_transfer_bucket_by_key command without any pre-configured scope."
commands.deny = ["get_file_transfer_bucket_by_key"]

View File

@ -0,0 +1,13 @@
# Automatically generated - DO NOT EDIT!
"$schema" = "../../schemas/schema.json"
[[permission]]
identifier = "allow-get-file-transfer-bucket-keys"
description = "Enables the get_file_transfer_bucket_keys command without any pre-configured scope."
commands.allow = ["get_file_transfer_bucket_keys"]
[[permission]]
identifier = "deny-get-file-transfer-bucket-keys"
description = "Denies the get_file_transfer_bucket_keys command without any pre-configured scope."
commands.deny = ["get_file_transfer_bucket_keys"]

View File

@ -0,0 +1,13 @@
# Automatically generated - DO NOT EDIT!
"$schema" = "../../schemas/schema.json"
[[permission]]
identifier = "allow-get-files-to-send"
description = "Enables the get_files_to_send command without any pre-configured scope."
commands.allow = ["get_files_to_send"]
[[permission]]
identifier = "deny-get-files-to-send"
description = "Denies the get_files_to_send command without any pre-configured scope."
commands.deny = ["get_files_to_send"]

View File

@ -0,0 +1,13 @@
# Automatically generated - DO NOT EDIT!
"$schema" = "../../schemas/schema.json"
[[permission]]
identifier = "allow-local-net-send-file"
description = "Enables the local_net_send_file command without any pre-configured scope."
commands.allow = ["local_net_send_file"]
[[permission]]
identifier = "deny-local-net-send-file"
description = "Denies the local_net_send_file command without any pre-configured scope."
commands.deny = ["local_net_send_file"]

View File

@ -362,6 +362,58 @@ Denies the delete_extension_data_by_id command without any pre-configured scope.
<tr>
<td>
`jarvis:allow-download-file`
</td>
<td>
Enables the download_file command without any pre-configured scope.
</td>
</tr>
<tr>
<td>
`jarvis:deny-download-file`
</td>
<td>
Denies the download_file command without any pre-configured scope.
</td>
</tr>
<tr>
<td>
`jarvis:allow-download-files`
</td>
<td>
Enables the download_files command without any pre-configured scope.
</td>
</tr>
<tr>
<td>
`jarvis:deny-download-files`
</td>
<td>
Denies the download_files command without any pre-configured scope.
</td>
</tr>
<tr>
<td>
`jarvis:allow-eject-all-disks`
</td>
@ -440,6 +492,32 @@ Denies the file_search command without any pre-configured scope.
<tr>
<td>
`jarvis:allow-file-transfer-preview-bucket`
</td>
<td>
Enables the file_transfer_preview_bucket command without any pre-configured scope.
</td>
</tr>
<tr>
<td>
`jarvis:deny-file-transfer-preview-bucket`
</td>
<td>
Denies the file_transfer_preview_bucket command without any pre-configured scope.
</td>
</tr>
<tr>
<td>
`jarvis:allow-get-all-extensions`
</td>
@ -726,6 +804,84 @@ Denies the get_extension_folder command without any pre-configured scope.
<tr>
<td>
`jarvis:allow-get-file-transfer-bucket-by-key`
</td>
<td>
Enables the get_file_transfer_bucket_by_key command without any pre-configured scope.
</td>
</tr>
<tr>
<td>
`jarvis:deny-get-file-transfer-bucket-by-key`
</td>
<td>
Denies the get_file_transfer_bucket_by_key command without any pre-configured scope.
</td>
</tr>
<tr>
<td>
`jarvis:allow-get-file-transfer-bucket-keys`
</td>
<td>
Enables the get_file_transfer_bucket_keys command without any pre-configured scope.
</td>
</tr>
<tr>
<td>
`jarvis:deny-get-file-transfer-bucket-keys`
</td>
<td>
Denies the get_file_transfer_bucket_keys command without any pre-configured scope.
</td>
</tr>
<tr>
<td>
`jarvis:allow-get-files-to-send`
</td>
<td>
Enables the get_files_to_send command without any pre-configured scope.
</td>
</tr>
<tr>
<td>
`jarvis:deny-get-files-to-send`
</td>
<td>
Denies the get_files_to_send command without any pre-configured scope.
</td>
</tr>
<tr>
<td>
`jarvis:allow-get-history`
</td>
@ -960,6 +1116,32 @@ Denies the is_window_label_registered command without any pre-configured scope.
<tr>
<td>
`jarvis:allow-local-net-send-file`
</td>
<td>
Enables the local_net_send_file command without any pre-configured scope.
</td>
</tr>
<tr>
<td>
`jarvis:deny-local-net-send-file`
</td>
<td>
Denies the local_net_send_file command without any pre-configured scope.
</td>
</tr>
<tr>
<td>
`jarvis:allow-logout-user`
</td>

View File

@ -429,6 +429,26 @@
"type": "string",
"const": "deny-delete-extension-data-by-id"
},
{
"description": "Enables the download_file command without any pre-configured scope.",
"type": "string",
"const": "allow-download-file"
},
{
"description": "Denies the download_file command without any pre-configured scope.",
"type": "string",
"const": "deny-download-file"
},
{
"description": "Enables the download_files command without any pre-configured scope.",
"type": "string",
"const": "allow-download-files"
},
{
"description": "Denies the download_files command without any pre-configured scope.",
"type": "string",
"const": "deny-download-files"
},
{
"description": "Enables the eject_all_disks command without any pre-configured scope.",
"type": "string",
@ -459,6 +479,16 @@
"type": "string",
"const": "deny-file-search"
},
{
"description": "Enables the file_transfer_preview_bucket command without any pre-configured scope.",
"type": "string",
"const": "allow-file-transfer-preview-bucket"
},
{
"description": "Denies the file_transfer_preview_bucket command without any pre-configured scope.",
"type": "string",
"const": "deny-file-transfer-preview-bucket"
},
{
"description": "Enables the get_all_extensions command without any pre-configured scope.",
"type": "string",
@ -569,6 +599,36 @@
"type": "string",
"const": "deny-get-extension-folder"
},
{
"description": "Enables the get_file_transfer_bucket_by_key command without any pre-configured scope.",
"type": "string",
"const": "allow-get-file-transfer-bucket-by-key"
},
{
"description": "Denies the get_file_transfer_bucket_by_key command without any pre-configured scope.",
"type": "string",
"const": "deny-get-file-transfer-bucket-by-key"
},
{
"description": "Enables the get_file_transfer_bucket_keys command without any pre-configured scope.",
"type": "string",
"const": "allow-get-file-transfer-bucket-keys"
},
{
"description": "Denies the get_file_transfer_bucket_keys command without any pre-configured scope.",
"type": "string",
"const": "deny-get-file-transfer-bucket-keys"
},
{
"description": "Enables the get_files_to_send command without any pre-configured scope.",
"type": "string",
"const": "allow-get-files-to-send"
},
{
"description": "Denies the get_files_to_send command without any pre-configured scope.",
"type": "string",
"const": "deny-get-files-to-send"
},
{
"description": "Enables the get_history command without any pre-configured scope.",
"type": "string",
@ -659,6 +719,16 @@
"type": "string",
"const": "deny-is-window-label-registered"
},
{
"description": "Enables the local_net_send_file command without any pre-configured scope.",
"type": "string",
"const": "allow-local-net-send-file"
},
{
"description": "Denies the local_net_send_file command without any pre-configured scope.",
"type": "string",
"const": "deny-local-net-send-file"
},
{
"description": "Enables the logout_user command without any pre-configured scope.",
"type": "string",

View File

@ -1,14 +0,0 @@
syntax = "proto3";
package helloworld;
service Greeter {
rpc SayHello (HelloRequest) returns (HelloReply);
}
message HelloRequest {
string name = 1;
}
message HelloReply {
string message = 1;
}

View File

@ -3,7 +3,7 @@ use db::{
ExtDataField, JarvisDB, SQLSortOrder,
};
use std::{path::PathBuf, sync::Mutex};
use tauri::{utils::acl::identifier, State};
use tauri::State;
use crate::utils::db::get_db;

View File

@ -1,5 +1,84 @@
use std::{collections::HashMap, sync::Mutex};
use tauri_plugin_network::network::mdns::ServiceInfoMod;
use std::{collections::HashMap, net::IpAddr, sync::Mutex};
use crate::server::{
grpc::{
client::get_grpc_tls_channel,
// kunkun::kunkun::{kunkun_client::KunkunClient, Empty, ServerInfoResponse},
},
model::ServerInfo,
};
use grpc::kunkun::{kunkun_client::KunkunClient, Empty, ServerInfoResponse};
// use crate::server::grpc::kunkun::kunkun::Empty
use mdns_sd::ServiceInfo;
use serde::{Deserialize, Serialize};
// use tonic::transport::{Certificate, Channel, ClientTlsConfig};
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ServiceInfoMod {
pub addresses: Vec<IpAddr>,
pub fullname: String, // <instance>.<service>.<domain>
pub hostname: String,
pub port: u16,
pub service_type: String, // Returns the service type including the domain label. For example: "_my-service._udp.local.".
pub sub_type: Option<String>, // Returns the service subtype including the domain label, if subtype has been defined. For example: "_printer._sub._http._tcp.local.".
pub properties: HashMap<String, String>,
pub public_key: String,
pub ssl_cert: String,
}
impl ServiceInfoMod {
pub async fn from(info: ServiceInfo) -> anyhow::Result<Self> {
let properties = info
.get_properties()
.iter()
.map(|property| (property.key().to_string(), property.val_str().to_string()))
.collect::<HashMap<_, _>>();
// Send ServerInfo gRPC request to get public_key and ssl_cert
let addresses: Vec<IpAddr> = info.get_addresses().iter().cloned().collect();
if addresses.len() == 0 {
return Err(anyhow::anyhow!("No addresses found"));
}
/* -------------------------------------------------------------------------- */
/* TODO: Remove this rest API workaround */
/* -------------------------------------------------------------------------- */
// send a rest request to get public_key and ssl_cert with reqwest, trust the cert
let client = reqwest::Client::builder()
.danger_accept_invalid_certs(true)
.build()
.unwrap();
let server_info = client
.get(format!("https://{}:9559/info", addresses[0]))
.send()
.await?
.json::<ServerInfo>()
.await?;
/* -------------------------------------------------------------------------- */
/* TODO: Remove this rest API workaround */
/* -------------------------------------------------------------------------- */
// I was not able to find a way to disable certificate verification with the tonic client, so the SSL cert is first fetched from the REST API with reqwest,
// then used to create a gRPC channel that trusts that known cert
let grpc_tls_channel =
get_grpc_tls_channel(&addresses[0].to_string(), 9559, &server_info.ssl_cert).await?;
let mut client = KunkunClient::new(grpc_tls_channel.clone());
let response: tonic::Response<ServerInfoResponse> = client.server_info(Empty {}).await?;
let server_info = response.into_inner();
let public_key = server_info.public_key;
let ssl_cert = server_info.ssl_cert;
Ok(Self {
addresses: info.get_addresses().iter().cloned().collect(),
fullname: info.get_fullname().to_string(),
hostname: info.get_hostname().to_string(),
port: info.get_port(),
service_type: info.get_type().to_string(),
sub_type: info.get_subtype().clone(),
properties,
public_key,
ssl_cert,
})
}
}
#[derive(Default, Debug)]
pub struct Peers {
@ -7,7 +86,7 @@ pub struct Peers {
}
impl Peers {
pub fn add_peer(&self, peer: ServiceInfoMod) {
pub async fn add_peer(&self, peer: ServiceInfoMod) {
let mut peers = self.peers.lock().unwrap();
peers.insert(peer.hostname.clone(), peer);
}
@ -41,6 +120,5 @@ pub async fn get_peers(
state: tauri::State<'_, Peers>,
) -> Result<HashMap<String, ServiceInfoMod>, String> {
let _peers = state.peers.lock().unwrap();
println!("get_peers: {:?}", _peers);
Ok(_peers.to_owned())
}
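For reference, a hedged sketch of consuming get_peers from the frontend. The field names are dictated by the serde(rename_all = "camelCase") attribute above; the invoke route is an assumption (the real app may go through @kksh/api or a plugin wrapper rather than calling the command directly):

```ts
import { invoke } from "@tauri-apps/api/core"

// Mirrors the camelCase serialization of ServiceInfoMod.
interface ServiceInfoMod {
  addresses: string[]
  fullname: string
  hostname: string
  port: number
  serviceType: string
  subType: string | null
  properties: Record<string, string>
  publicKey: string
  sslCert: string
}

// "plugin:jarvis|get_peers" is an assumed command route.
const peers = await invoke<Record<string, ServiceInfoMod>>("plugin:jarvis|get_peers")
for (const [hostname, peer] of Object.entries(peers)) {
  console.log(hostname, peer.addresses[0], peer.port, peer.sslCert.length > 0)
}
```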

View File

@ -223,7 +223,7 @@ pub async fn spawn_extension_file_server<R: Runtime>(
.unwrap();
});
// add server handle and shutdown handle to extension
let mut ext = label_ext_map.get_mut(window_label.as_str()).unwrap();
let ext = label_ext_map.get_mut(window_label.as_str()).unwrap();
ext.server_handle.lock().unwrap().replace(server_handle);
ext.shutdown_handle.lock().unwrap().replace(shutdown_handle);

View File

@ -0,0 +1,275 @@
use crate::{
models::{FileTransferState, FilesBucket, PreviewFileTransferBucket},
server::grpc::{
client::get_grpc_tls_channel,
file_transfer::{self as ft, compute_total_size, count_file_nodes, FileTransferPayload},
},
utils::{reqwest::build_ssl_reqwest_client, transfer_stats::TransferStats},
};
use futures_util::TryStreamExt;
use grpc::file_transfer::{
file_transfer_client::FileTransferClient, FileNode, FileType, StartTransferRequest,
StartTransferResponse,
};
use std::{
collections::HashSet,
path::{Path, PathBuf},
time::Instant,
};
use tauri::ipc::Channel;
use tokio::{
fs::File,
io::{AsyncWriteExt, BufWriter},
sync::mpsc,
};
use uuid::Uuid;
#[derive(Clone, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ProgressPayload {
pub code: String,
pub progress_bytes: u128,
pub total_bytes: u128,
pub transfer_speed_bytes_per_second: f64,
pub current_file_name: String,
pub total_files: usize,
pub current_file_index: usize,
}
#[derive(Debug)]
struct FileDownloadProgress {
file_name: String,
bytes_transferred: u128,
}
async fn download_file(
url: &str,
code: &str,
file_path: &Path,
client: &reqwest::Client,
stats: &mut TransferStats,
progress_tx: mpsc::Sender<FileDownloadProgress>,
) -> anyhow::Result<()> {
let response = client.get(url).header("Authorization", code).send().await?;
let mut file = BufWriter::new(File::create(file_path).await?);
let mut stream = response.bytes_stream();
let file_name = file_path
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("unknown")
.to_string();
while let Some(chunk) = stream.try_next().await? {
file.write_all(&chunk).await?;
stats.record_chunk_transfer(chunk.len());
progress_tx
.send(FileDownloadProgress {
file_name: file_name.clone(),
bytes_transferred: chunk.len() as u128,
})
.await
.ok();
}
file.flush().await?;
Ok(())
}
async fn download_file_node_recursively(
node: &FileNode,
dir: &Path,
config: &FileTransferPayload,
client: &reqwest::Client,
stats: &mut TransferStats,
progress_tx: mpsc::Sender<FileDownloadProgress>,
) -> anyhow::Result<()> {
let mut stack = vec![(node.clone(), dir.to_path_buf())];
while let Some((current_node, current_dir)) = stack.pop() {
let download_path = current_dir.join(&current_node.filename);
if current_node.r#type == FileType::File as i32 {
let url = format!(
"https://{ip}:{port}/download-file?id={id}",
ip = config.ip,
port = config.port,
id = current_node.id
);
download_file(
&url,
&config.code,
&download_path,
client,
stats,
progress_tx.clone(),
)
.await?;
} else if current_node.r#type == FileType::Directory as i32 {
tokio::fs::create_dir_all(&download_path).await?;
for child in current_node.children.iter() {
stack.push((child.clone(), download_path.clone()));
}
} else {
return Err(anyhow::anyhow!(
"Invalid file type: {}",
current_node.r#type
));
}
}
Ok(())
}
#[tauri::command]
pub async fn download_files(
mut payload: FileTransferPayload,
save_dir: PathBuf,
on_progress: Channel<ProgressPayload>,
file_transfer_state: tauri::State<'_, FileTransferState>,
) -> Result<(), String> {
if !save_dir.exists() {
return Err(format!(
"Save directory does not exist: {}",
save_dir.display()
));
}
let (progress_tx, mut progress_rx) = mpsc::channel::<FileDownloadProgress>(100);
let total_bytes = compute_total_size(&payload.root);
let total_files = count_file_nodes(&payload.root);
let client = build_ssl_reqwest_client(None, Some(payload.ssl_cert.clone()))
.map_err(|e| e.to_string())?;
if payload.root.filename.is_empty() {
let timestamp = chrono::Utc::now().format("%Y-%m-%d_%H-%M-%S").to_string();
payload.root.filename = timestamp;
}
let mut stats = TransferStats::default();
let start_time = Instant::now();
let on_progress_clone = on_progress.clone();
let mut files_received: HashSet<String> = HashSet::new();
let code = payload.code.clone();
tokio::spawn(async move {
let mut total_transferred = 0u128;
let mut unreported_bytes = 0u128;
const REPORT_THRESHOLD: u128 = 1024 * 1024; // 1MB in bytes
while let Some(progress) = progress_rx.recv().await {
total_transferred += progress.bytes_transferred;
unreported_bytes += progress.bytes_transferred;
files_received.insert(progress.file_name.clone());
// Report progress when we've accumulated ~1MB of unreported bytes or reached the end
if unreported_bytes >= REPORT_THRESHOLD || total_transferred == total_bytes {
let bytes_per_second =
total_transferred as f64 / start_time.elapsed().as_secs_f64();
let progress_payload = ProgressPayload {
code: code.clone(),
progress_bytes: total_transferred,
total_bytes,
transfer_speed_bytes_per_second: bytes_per_second,
current_file_name: progress.file_name,
total_files,
current_file_index: files_received.len(),
};
on_progress_clone.send(progress_payload).ok();
unreported_bytes = 0;
}
}
});
let download_path = save_dir.join(&payload.root.filename);
std::fs::create_dir_all(&download_path).map_err(|e| e.to_string())?;
download_file_node_recursively(
&payload.root,
save_dir.as_path(),
&payload,
&client,
&mut stats,
progress_tx,
)
.await
.map_err(|e| e.to_string())?;
Ok(())
}
#[tauri::command]
pub async fn get_file_transfer_bucket_keys(
ft_state: tauri::State<'_, FileTransferState>,
) -> Result<Vec<String>, String> {
let buckets = ft_state.buckets.lock().unwrap();
Ok(buckets.keys().cloned().collect())
}
#[tauri::command]
pub async fn get_file_transfer_bucket_by_key(
ft_state: tauri::State<'_, FileTransferState>,
key: String,
) -> Result<FilesBucket, String> {
let buckets = ft_state.buckets.lock().unwrap();
let bucket = match buckets.get(&key) {
Some(b) => b,
None => return Err(format!("Bucket not found for key: {}", key)),
};
Ok(bucket.clone())
}
#[tauri::command]
pub async fn file_transfer_preview_bucket(
files: Vec<PathBuf>,
) -> Result<PreviewFileTransferBucket, String> {
let (_id_path_map, root) =
ft::build_file_node_and_id_path_map(&files).map_err(|err| err.to_string())?;
let total_bytes = compute_total_size(&root);
let total_files = count_file_nodes(&root);
Ok(PreviewFileTransferBucket {
total_bytes,
total_files,
})
}
#[tauri::command]
pub async fn local_net_send_file(
file_transfer: tauri::State<'_, FileTransferState>,
files_to_send: Vec<PathBuf>,
ip: String,
port: u16,
cert_pem: String,
) -> Result<(), String> {
let uuid = Uuid::new_v4().to_string();
let (id_path_map, root) =
ft::build_file_node_and_id_path_map(&files_to_send).map_err(|err| err.to_string())?;
// Scope the MutexGuard to drop it before await
{
let mut buckets = file_transfer.buckets.lock().unwrap();
buckets.insert(
uuid.clone(),
FilesBucket {
code: uuid.clone(),
id_path_map,
},
);
}
let tls_channel = get_grpc_tls_channel(&ip, port, &cert_pem)
.await
.map_err(|err| err.to_string())?;
let mut client = FileTransferClient::new(tls_channel);
// Send the transfer request
let _response: tonic::Response<StartTransferResponse> = client
.start_transfer(StartTransferRequest {
port: port.to_string(),
root: Some(root),
code: uuid.clone(),
ssl_cert: cert_pem,
})
.await
.map_err(|e| e.to_string())?;
Ok(())
}

View File

@ -9,6 +9,7 @@ pub mod fs;
pub mod path;
pub mod server;
// pub mod storage;
pub mod file_transfer;
pub mod security;
pub mod system;
pub mod utils;

View File

@ -1,5 +1,4 @@
use crate::{model::app_state, server::http::Server};
use std::path::PathBuf;
use crate::server::http::Server;
#[tauri::command]
pub async fn start_server(server: tauri::State<'_, Server>) -> Result<(), String> {

View File

@ -1,14 +0,0 @@
use serde::de::DeserializeOwned;
use tauri::{plugin::PluginApi, AppHandle, Runtime};
pub fn init<R: Runtime, C: DeserializeOwned>(
app: &AppHandle<R>,
_api: PluginApi<R, C>,
) -> crate::Result<Jarvis<R>> {
Ok(Jarvis(app.clone()))
}
/// Access to the jarvis APIs.
pub struct Jarvis<R: Runtime>(AppHandle<R>);
impl<R: Runtime> Jarvis<R> {}

View File

@ -1,7 +1,10 @@
use commands::discovery::Peers;
use db::JarvisDB;
use model::extension::Extension;
use server::Protocol;
use models::FileTransferState;
use openssl::{
pkey::{Private, Public},
rsa::Rsa,
};
use tauri::{
plugin::{Builder, TauriPlugin},
Manager, Runtime,
@ -14,15 +17,9 @@ pub mod setup;
pub mod syscmds;
pub mod utils;
pub use db;
use std::{collections::HashMap, path::PathBuf, sync::Mutex};
use tauri_plugin_store::StoreBuilder;
use utils::{
path::{get_default_extensions_dir, get_kunkun_db_path},
settings::AppSettings,
};
use std::{collections::HashMap, sync::Mutex};
use utils::path::get_kunkun_db_path;
#[cfg(desktop)]
mod desktop;
#[cfg(mobile)]
mod mobile;
@ -32,24 +29,26 @@ mod models;
pub use error::{Error, Result};
#[cfg(desktop)]
use desktop::Jarvis;
#[cfg(mobile)]
use mobile::Jarvis;
#[derive(Default)]
pub struct JarvisState {
pub window_label_ext_map: Mutex<HashMap<String, Extension>>,
// the RSA key pair is newly generated every time the app starts and is stored only in memory; used for encryption and signing
pub rsa_private_key: Rsa<Private>,
pub rsa_public_key: Rsa<Public>,
}
/// Extensions to [`tauri::App`], [`tauri::AppHandle`] and [`tauri::Window`] to access the jarvis APIs.
pub trait JarvisExt<R: Runtime> {
fn jarvis(&self) -> &Jarvis<R>;
}
impl<R: Runtime, T: Manager<R>> crate::JarvisExt<R> for T {
fn jarvis(&self) -> &Jarvis<R> {
self.state::<Jarvis<R>>().inner()
impl JarvisState {
pub fn new() -> Self {
let private_key =
crypto::RsaCrypto::generate_rsa().expect("Failed to generate RSA key pair");
let public_key: Rsa<Public> = crypto::RsaCrypto::private_key_to_public_key(&private_key);
Self {
window_label_ext_map: Mutex::new(HashMap::new()),
rsa_private_key: private_key,
rsa_public_key: public_key,
}
}
}
@ -167,19 +166,24 @@ pub fn init<R: Runtime>(db_key: Option<String>) -> TauriPlugin<R> {
/* -------------------------------------------------------------------------- */
/* MDNS */
/* -------------------------------------------------------------------------- */
commands::discovery::get_peers
commands::discovery::get_peers,
/* -------------------------------------------------------------------------- */
/* File Transfer */
/* -------------------------------------------------------------------------- */
// commands::server::get_files_to_send,
commands::file_transfer::get_file_transfer_bucket_keys,
commands::file_transfer::get_file_transfer_bucket_by_key,
commands::file_transfer::local_net_send_file,
commands::file_transfer::download_files,
commands::file_transfer::file_transfer_preview_bucket,
])
.setup(move |app, api| {
// #[cfg(mobile)]
// let jarvis = mobile::init(app, api)?;
#[cfg(desktop)]
let jarvis = desktop::init(app, api)?;
app.manage(jarvis);
utils::setup::setup_app_path(app);
utils::setup::setup_extension_storage(app);
// manage state so it is accessible by the commands
app.manage(JarvisState::default());
app.manage(JarvisState::new());
app.manage(FileTransferState::default());
app.manage(commands::apps::ApplicationsState::default());
let db_path = get_kunkun_db_path(app)?;
app.manage(commands::db::DBState::new(db_path.clone(), db_key.clone())?);

View File

@ -1,4 +1,3 @@
use std::{path::PathBuf, sync::Mutex};
pub struct AppState {
// pub history: Mutex<Vec<Record>>,

View File

@ -1,7 +1,5 @@
use super::manifest::Permissions;
use serde::{Deserialize, Serialize};
use std::{
net::SocketAddr,
path::PathBuf,
sync::{Arc, Mutex},
};

View File

@ -1 +1,33 @@
use serde::{Deserialize, Serialize};
use std::{
collections::HashMap,
path::PathBuf,
sync::{Arc, Mutex},
};
// #[derive(Default, Clone, Serialize, Deserialize, Debug)]
// pub struct FileTransferInfo {
// pub filename: String,
// pub code: String,
// pub ip: String,
// pub port: u16,
// }
/// A bucket of files that share the same code
#[derive(Default, Clone, Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct FilesBucket {
pub code: String,
pub id_path_map: HashMap<String, PathBuf>, // maps a file id to its path on disk
}
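/// In-memory registry of outgoing transfers. The sender registers a [`FilesBucket`]
/// keyed by its one-time code (a UUID) before asking the peer to start a transfer; the
/// receiver later presents that code as the `Authorization` header on `/download-file`.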
#[derive(Default, Serialize, Deserialize, Debug)]
pub struct FileTransferState {
pub buckets: Arc<Mutex<HashMap<String, FilesBucket>>>,
}
#[derive(Default, Serialize, Deserialize, Debug)]
pub struct PreviewFileTransferBucket {
pub total_bytes: u128,
pub total_files: usize,
}

View File

@ -0,0 +1,31 @@
use tonic::transport::{Certificate, Channel, ClientTlsConfig};
/// Given a self-signed cert, return a gRPC channel that trusts it.
/// The trusted domain name is hard-coded to "localhost".
pub async fn get_grpc_tls_channel(ip: &str, port: u16, cert_pem: &str) -> anyhow::Result<Channel> {
let ca = Certificate::from_pem(cert_pem);
let tls = ClientTlsConfig::new()
.ca_certificate(ca)
.domain_name("localhost");
let url = format!("https://{}:{}", ip, port);
Ok(Channel::from_shared(url)?
.tls_config(tls)?
.connect()
.await?)
}
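// Example (sketch): a typical caller builds the channel and hands it to a
// tonic-generated client (e.g. the generated FileTransferClient). The ip, port and
// request values below are placeholders; imports are omitted since they depend on
// where the generated code lives.
//
// let channel = get_grpc_tls_channel("192.168.1.10", 9559, &cert_pem).await?;
// let mut client = FileTransferClient::new(channel);
// let response = client.start_transfer(request).await?;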
// this implementation is failing
// pub async fn get_grpc_tls_client<T, C>(
// ip: &str,
// port: u16,
// cert_pem: &str
// ) -> anyhow::Result<T>
// where
// T: From<tonic::transport::Channel>,
// T: tonic::client::GrpcService<C>,
// C: Default,
// {
// let channel = get_grpc_tls_channel(ip, port, cert_pem).await?;
// Ok(T::from(channel))
// }

View File

@ -1,39 +1,275 @@
use file_transfer::file_transfer_server::FileTransfer;
use file_transfer::{
SendTransferInfoResponse, StartTransferRequest, StartTransferResponse, TransferInfo,
};
use tauri::AppHandle;
use tonic::{Request, Response, Status};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
pub mod file_transfer {
tonic::include_proto!("file_transfer"); // The string specified here must match the proto package name
pub const FILE_DESCRIPTOR_SET: &[u8] = tonic::include_file_descriptor_set!("kk_grpc");
}
use grpc::file_transfer::file_transfer_server::FileTransfer;
use grpc::file_transfer::{FileNode, FileType, StartTransferRequest, StartTransferResponse};
use serde::{Deserialize, Serialize};
use tauri::{AppHandle, Emitter};
use tonic::{Request, Response, Status};
use uuid::Uuid;
#[derive(Debug)]
pub struct MyFileTransfer {
pub app_handle: AppHandle,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FileTransferPayload {
pub port: String,
pub code: String,
pub total_bytes: u128,
pub total_files: usize,
pub root: FileNode,
pub ip: String,
pub ssl_cert: String,
}
#[tonic::async_trait]
impl FileTransfer for MyFileTransfer {
async fn start_transfer(
&self,
request: Request<StartTransferRequest>, // Accept request of type StartTransferRequest
) -> Result<Response<StartTransferResponse>, Status> {
println!("Got a request: {:?}", request);
let reply = StartTransferResponse {
port: "8080".to_string(),
let reply = StartTransferResponse {};
// let ip = request.remote_addr().unwrap().ip();
let ip = "localhost";
println!("start_transfer remote addr: {:?}", request.remote_addr());
let payload = request.into_inner();
println!("start_transfer payload: {:?}", payload);
let root = if let Some(root) = payload.root {
root
} else {
return Err(Status::invalid_argument("root is required"));
};
let total_bytes = compute_total_size(&root);
let total_files = count_file_nodes(&root);
// forward the transfer request to the frontend for confirmation
self.app_handle
.emit(
"file-transfer-request",
FileTransferPayload {
port: payload.port,
code: payload.code,
root,
total_bytes,
total_files,
ip: ip.to_string(),
ssl_cert: payload.ssl_cert,
},
)
.map_err(|e| Status::internal(e.to_string()))?;
Ok(Response::new(reply)) // acknowledge the transfer request
}
}
async fn send_transfer_info(
&self,
request: Request<TransferInfo>,
) -> Result<Response<SendTransferInfoResponse>, Status> {
println!("Got a request: {:?}", request);
Ok(Response::new(SendTransferInfoResponse {}))
pub fn construct_file_node(path: &Path) -> anyhow::Result<FileNode> {
if !path.exists() {
return Err(anyhow::anyhow!("path not exists"));
}
if !path.is_file() {
return Err(anyhow::anyhow!("path is not a file"));
}
Ok(FileNode {
filename: path
.file_name()
.expect("Fail to get file name")
.to_string_lossy()
.to_string(),
file_size: path.metadata()?.len(),
id: Uuid::new_v4().to_string(),
r#type: FileType::File as i32,
children: vec![],
})
}
pub fn construct_directory_node(path: &Path) -> anyhow::Result<FileNode> {
if !path.exists() {
return Err(anyhow::anyhow!("path not exists"));
}
if !path.is_dir() {
return Err(anyhow::anyhow!("path is not a directory"));
}
// construct children
let children = path
.read_dir()?
.filter_map(|entry| construct_node(&entry.ok()?.path()).ok())
.collect();
Ok(FileNode {
filename: path
.file_name()
.expect("Fail to get file name")
.to_string_lossy()
.to_string(),
file_size: path.metadata()?.len(),
id: Uuid::new_v4().to_string(),
r#type: FileType::Directory as i32,
children,
})
}
pub fn construct_node(path: &Path) -> anyhow::Result<FileNode> {
if path.is_file() {
construct_file_node(path)
} else {
construct_directory_node(path)
}
}
pub fn compute_file_node_total_size(node: &FileNode) -> u64 {
if node.r#type == FileType::File as i32 {
node.file_size
} else {
node.children
.iter()
.map(|child| compute_file_node_total_size(child))
.sum()
}
}
/// Flatten the file node tree into a vector of (id, path); each path should be absolute.
/// `root_path` should be the directory containing `node`.
pub fn get_id_path_array(node: &FileNode, root_path: &Path) -> Vec<(String, PathBuf)> {
let mut vec: Vec<(String, PathBuf)> = Vec::new();
let dir_path = root_path.join(&node.filename);
if node.r#type == FileType::File as i32 {
vec.push((node.id.clone(), dir_path));
} else {
for child in node.children.iter() {
vec.extend(get_id_path_array(child, &dir_path));
}
}
vec
}
/// The returned root node has an empty filename because the `files` passed in are not necessarily in the same directory.
/// Leaving the root filename empty avoids confusion: once the receiver gets the root node, it can fill in a custom directory name.
pub fn build_file_node_and_id_path_map(
files: &Vec<PathBuf>,
) -> anyhow::Result<(HashMap<String, PathBuf>, FileNode)> {
let mut id_path_array: Vec<(String, PathBuf)> = Vec::new();
let mut children: Vec<FileNode> = vec![];
for file in files.iter() {
let node = construct_node(file)?;
id_path_array.extend(get_id_path_array(&node, file.parent().unwrap()));
children.push(node);
}
let root = FileNode {
filename: "".to_string(),
file_size: 0,
id: Uuid::new_v4().to_string(),
r#type: FileType::Directory as i32,
children,
};
let map: HashMap<String, PathBuf> = id_path_array.into_iter().collect();
Ok((map, root))
}
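// Example (sketch): build a transfer tree from local paths. The root node's filename is
// intentionally empty so the receiver can pick its own directory name, and every file id
// in the tree resolves to an absolute path on the sender. `std::env::temp_dir()` is used
// only because it exists on every platform.
#[allow(dead_code)]
fn example_build_bucket() -> anyhow::Result<()> {
let files = vec![std::env::temp_dir()];
let (id_path_map, root) = build_file_node_and_id_path_map(&files)?;
assert_eq!(root.filename, "");
// one map entry per file node in the tree
assert_eq!(id_path_map.len(), count_file_nodes(&root));
Ok(())
}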
pub fn count_file_nodes(node: &FileNode) -> usize {
if node.r#type == FileType::File as i32 {
1
} else {
node.children
.iter()
.map(|child| count_file_nodes(child))
.sum()
}
}
pub fn compute_total_size(node: &FileNode) -> u128 {
if node.r#type == FileType::File as i32 {
node.file_size as u128
} else {
node.children
.iter()
.map(|child| compute_total_size(child))
.sum()
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_construct_node() {
let manifest_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
let src_path = PathBuf::from(manifest_path).join("src");
// manifest_path is pointing to grpc crate
let node = construct_node(src_path.as_path()).unwrap();
println!("{:#?}", node);
// println!("total size: {}", compute_total_size(&node));
}
#[test]
fn test_build_file_node_and_id_path_map() {
let manifest_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
let src_path = PathBuf::from(manifest_path).join("src");
let (map, node) = build_file_node_and_id_path_map(&vec![src_path]).unwrap();
println!("{:#?}", node);
// check if all paths are absolute and exists
println!("{:#?}", map);
for (_, path) in map.iter() {
if !path.exists() {
panic!("path not exists: {}", path.to_string_lossy());
}
}
}
#[test]
fn test_get_id_path_array() {
let manifest_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
let src_path = PathBuf::from(manifest_path)
.join("src")
.canonicalize()
.unwrap();
assert!(src_path.exists());
// let src_path = PathBuf::from("/Users/hk/Dev/kunkun/packages/grpc/src");
let node = construct_node(&src_path).unwrap();
println!("{:#?}", node);
let array = get_id_path_array(&node, src_path.parent().unwrap());
println!("{:#?}", array);
// check if all paths are absolute and exists
for (_, path) in array.iter() {
if !path.exists() {
panic!("path not exists: {}", path.to_string_lossy());
}
// assert!(path.is_absolute());
// assert!(path.exists());
}
}
#[test]
fn test_count_file_nodes() {
let manifest_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
let src_path = PathBuf::from(manifest_path).join("src");
let node = construct_node(&src_path).unwrap();
// println!("{:#?}", node);
let count = count_file_nodes(&node);
// run "find . -type f | wc -l" to get a ground truth
if cfg!(target_os = "windows") {
return;
}
let stdout = String::from_utf8(
std::process::Command::new("find")
.arg(&src_path)
.arg("-type")
.arg("f")
.output()
.unwrap()
.stdout
).unwrap();
let count2 = stdout.lines().count();
assert_eq!(count, count2);
}
#[test]
fn test_compute_total_size() {
let manifest_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
let src_path = PathBuf::from(manifest_path).join("src");
println!("src_path: {}", src_path.to_string_lossy());
let node = construct_node(&src_path).unwrap();
let size = compute_total_size(&node);
println!("size: {}", size);
}
}

View File

@ -1,35 +0,0 @@
use hello_world::greeter_server::Greeter;
use hello_world::{HelloReply, HelloRequest};
use tauri::AppHandle;
use tonic::{Request, Response, Status};
pub mod hello_world {
tonic::include_proto!("helloworld"); // The string specified here must match the proto package name
pub const FILE_DESCRIPTOR_SET: &[u8] = tonic::include_file_descriptor_set!("kk_grpc");
}
#[derive(Debug)]
pub struct MyGreeter {
pub app_handle: AppHandle,
pub name: String,
}
#[tonic::async_trait]
impl Greeter for MyGreeter {
async fn say_hello(
&self,
request: Request<HelloRequest>, // Accept request of type HelloRequest
) -> Result<Response<HelloReply>, Status> {
println!("Got a request: {:?}", request);
let reply = HelloReply {
message: format!(
"Hello {} from {} by Kunkun {}!",
request.into_inner().name,
self.name,
self.app_handle.package_info().version
), // We must use .into_inner() as the fields of gRPC requests and responses are private
};
Ok(Response::new(reply)) // Send back our formatted greeting
}
}

View File

@ -0,0 +1,44 @@
// use hello_world::greeter_server::Greeter;
// use hello_world::{HelloReply, HelloRequest};
// use server_info::server_info_server::ServerInfo;
// use server_info::{InfoRequest, InfoResponse};
use grpc::kunkun::{
kunkun_server::Kunkun,
Empty, ServerInfoResponse,
};
use tauri::{AppHandle, Emitter, Manager};
use tonic::{Request, Response, Status};
use crate::{constants::KUNKUN_REFRESH_WORKER_EXTENSION, server::http::Server, JarvisState};
#[derive(Debug)]
pub struct KunkunService {
pub app_handle: AppHandle,
}
#[tonic::async_trait]
impl Kunkun for KunkunService {
async fn server_info(
&self,
request: Request<Empty>,
) -> Result<Response<ServerInfoResponse>, Status> {
println!("Got a request: {:?}", request);
Ok(Response::new(ServerInfoResponse {
service_name: self.app_handle.package_info().name.clone(),
service_version: self.app_handle.package_info().version.to_string(),
public_key: crypto::RsaCrypto::public_key_to_string(
&self.app_handle.state::<JarvisState>().rsa_public_key,
),
ssl_cert: String::from_utf8(self.app_handle.state::<Server>().ssl_cert.clone())
.unwrap(),
}))
}
async fn hmr(&self, _request: Request<Empty>) -> Result<Response<Empty>, Status> {
self.app_handle
.emit(KUNKUN_REFRESH_WORKER_EXTENSION, ())
.unwrap();
Ok(Response::new(Empty {}))
}
}

View File

@ -1,2 +1,3 @@
pub mod client;
pub mod file_transfer;
pub mod greeter;
pub mod kunkun;

View File

@ -1,26 +1,26 @@
use super::grpc::greeter::MyGreeter;
use super::grpc::{
file_transfer::file_transfer::file_transfer_server::FileTransferServer,
greeter::hello_world::greeter_server::GreeterServer,
};
use super::model::ServerState;
use super::Protocol;
use crate::server::grpc::file_transfer::MyFileTransfer;
use crate::server::tls::{CERT_PEM, KEY_PEM};
use crate::utils::path::get_default_extensions_dir;
use axum::http::{HeaderValue, Method, StatusCode, Uri};
use axum::routing::{get, get_service, post};
use crate::server::grpc::kunkun::KunkunService;
use axum::routing::{get, post};
use axum_server::tls_rustls::RustlsConfig;
use base64::prelude::*;
use grpc::{
file_transfer::file_transfer_server::FileTransferServer,
kunkun::kunkun_server::KunkunServer,
};
/// This module is responsible for controlling the main server
use obfstr::obfstr as s;
use std::sync::Mutex;
use std::{net::SocketAddr, path::PathBuf, sync::Arc};
use std::{net::SocketAddr, sync::Arc};
use tauri::AppHandle;
use tonic::transport::Server as TonicServer;
use tower_http::{cors::CorsLayer, services::ServeDir};
use tower_http::cors::CorsLayer;
struct ServerOptions {}
struct ServerOptions {
ssl_cert: Vec<u8>,
ssl_key: Vec<u8>,
}
async fn start_server(
protocol: Protocol,
@ -33,25 +33,20 @@ async fn start_server(
app_handle: app_handle.clone(),
};
let reflection_service = tonic_reflection::server::Builder::configure()
.register_encoded_file_descriptor_set(
super::grpc::greeter::hello_world::FILE_DESCRIPTOR_SET,
)
.register_encoded_file_descriptor_set(
super::grpc::file_transfer::file_transfer::FILE_DESCRIPTOR_SET,
)
.register_encoded_file_descriptor_set(grpc::kunkun::FILE_DESCRIPTOR_SET)
.register_encoded_file_descriptor_set(grpc::file_transfer::FILE_DESCRIPTOR_SET)
.build()
.unwrap();
let greeter = MyGreeter {
app_handle: app_handle.clone(),
name: "jarvis".to_string(),
};
let file_transfer = MyFileTransfer {
app_handle: app_handle.clone(),
};
let kk_service = KunkunService {
app_handle: app_handle.clone(),
};
let grpc_router = TonicServer::builder()
.add_service(reflection_service)
.add_service(GreeterServer::new(greeter))
.add_service(FileTransferServer::new(file_transfer))
.add_service(KunkunServer::new(kk_service))
.into_router();
let rest_router = axum::Router::new()
.route(
@ -59,13 +54,11 @@ async fn start_server(
post(super::rest::refresh_worker_extension),
)
.route("/info", get(super::rest::get_server_info))
.route("/download-file", get(super::rest::download_file))
// .route("/stream-file", get(super::rest::stream_file))
.layer(CorsLayer::permissive())
.with_state(server_state);
async fn fallback(uri: Uri) -> (StatusCode, String) {
println!("No route for {uri}");
(StatusCode::NOT_FOUND, format!("No route for {uri}"))
}
let combined_router = axum::Router::new()
.merge(grpc_router)
.merge(rest_router)
@ -78,27 +71,7 @@ async fn start_server(
.await
}
Protocol::Https => {
let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
println!("manifest_dir: {}", manifest_dir.display());
let (key_pem, cert_pem) = if cfg!(debug_assertions) {
// In debug mode, use the base64 encoded certs from env
let cert_pem = BASE64_STANDARD
.decode(s!(env!("BASE64_CERT_PEM")).to_string())
.expect("Failed to decode cert_pem");
let key_pem = BASE64_STANDARD
.decode(s!(env!("BASE64_KEY_PEM")).to_string())
.expect("Failed to decode key_pem");
(key_pem, cert_pem)
} else {
// In release mode, generate new self-signed certs every time app starts for safety
let rsa =
crypto::RsaCrypto::generate_rsa().expect("Failed to generate RSA key pair");
crypto::ssl::generate_self_signed_certificate(&rsa, 365)
.expect("Failed to generate self-signed certificate")
};
let tls_config = RustlsConfig::from_pem(cert_pem, key_pem).await?;
let tls_config = RustlsConfig::from_pem(options.ssl_cert, options.ssl_key).await?;
axum_server::bind_rustls(server_addr, tls_config)
.handle(shtdown_handle)
.serve(combined_router.into_make_service())
@ -114,16 +87,35 @@ pub struct Server {
pub protocol: Mutex<Protocol>,
pub port: u16,
pub server_handle: Arc<std::sync::Mutex<Option<tauri::async_runtime::JoinHandle<()>>>>,
pub ssl_cert: Vec<u8>,
pub ssl_key: Vec<u8>,
}
impl Server {
pub fn new(app_handle: AppHandle, port: u16, protocol: Protocol) -> Self {
let (key_pem, cert_pem) = if cfg!(debug_assertions) {
// In debug mode, use the base64 encoded certs from env
let cert_pem = BASE64_STANDARD
.decode(s!(env!("BASE64_CERT_PEM")).to_string())
.expect("Failed to decode cert_pem");
let key_pem = BASE64_STANDARD
.decode(s!(env!("BASE64_KEY_PEM")).to_string())
.expect("Failed to decode key_pem");
(key_pem, cert_pem)
} else {
// In release mode, generate new self-signed certs every time the app starts for safety
let rsa = crypto::RsaCrypto::generate_rsa().expect("Failed to generate RSA key pair");
crypto::ssl::generate_self_signed_certificate(&rsa, 365)
.expect("Failed to generate self-signed certificate")
};
Self {
app_handle,
protocol: Mutex::new(protocol),
port,
server_handle: Arc::new(std::sync::Mutex::new(None)),
shtdown_handle: Arc::new(Mutex::new(None)),
ssl_cert: cert_pem,
ssl_key: key_pem,
}
}
@ -143,6 +135,8 @@ impl Server {
let _shutdown_handle = axum_server::Handle::new();
*shtdown_handle = Some(_shutdown_handle.clone());
let protocol = self.protocol.lock().unwrap().clone();
let ssl_cert = self.ssl_cert.clone();
let ssl_key = self.ssl_key.clone();
*server_handle = Some(tauri::async_runtime::spawn(async move {
match start_server(
@ -150,7 +144,7 @@ impl Server {
server_addr,
app_handle,
_shutdown_handle,
ServerOptions {},
ServerOptions { ssl_cert, ssl_key },
)
.await
{

View File

@ -1,11 +1,12 @@
use serde::Serialize;
use tauri::{AppHandle, Runtime};
use serde::{Deserialize, Serialize};
use tauri::AppHandle;
#[derive(Serialize)]
#[derive(Serialize, Debug, Deserialize)]
pub struct ServerInfo {
pub service_name: String,
pub service_version: String,
pub public_key: String,
pub ssl_cert: String,
}
#[derive(Clone)]
@ -13,3 +14,17 @@ pub struct ServerState {
// that holds some api specific state
pub app_handle: AppHandle,
}
#[derive(Serialize, Debug, Clone)]
pub struct FileTransferProgressPayload {
pub progress: f64,
pub sent_bytes: u64,
pub total_size: u64,
pub chunk_count: u64,
}
#[derive(Debug, Deserialize)]
#[allow(dead_code)]
pub struct DownloadFilePayload {
pub id: String,
}

View File

@ -1,8 +1,23 @@
use super::model::{ServerInfo, ServerState};
use crate::constants::{KUNKUN_REFRESH_WORKER_EXTENSION, SERVER_PUBLIC_KEY};
use axum::extract::State;
use tauri::Emitter;
use super::{
http::Server,
model::{ServerInfo, ServerState},
};
use crate::{
constants::KUNKUN_REFRESH_WORKER_EXTENSION,
models::FileTransferState,
server::model::FileTransferProgressPayload,
JarvisState,
};
use axum::{
body::StreamBody,
extract::{Query, State},
http::{header, HeaderMap, StatusCode},
response::IntoResponse,
};
use tauri::{Emitter, Manager};
use tokio::fs::File;
use tokio_stream::StreamExt;
use tokio_util::io::ReaderStream;
/// This file contains REST API endpoints
pub async fn web_root() -> axum::Json<serde_json::Value> {
@ -13,10 +28,17 @@ pub async fn web_root() -> axum::Json<serde_json::Value> {
pub async fn get_server_info(State(state): State<ServerState>) -> axum::Json<ServerInfo> {
let pkg_info = state.app_handle.package_info();
let jarvis_state = state.app_handle.state::<JarvisState>();
let pub_key_pem = jarvis_state
.rsa_public_key
.public_key_to_pem()
.expect("Failed to convert public key to pem");
axum::Json(ServerInfo {
service_name: pkg_info.name.to_string(),
service_version: pkg_info.version.to_string(),
public_key: String::from_utf8(SERVER_PUBLIC_KEY.clone()).unwrap(),
public_key: String::from_utf8(pub_key_pem).expect("Failed to convert public key to string"),
ssl_cert: String::from_utf8(state.app_handle.state::<Server>().ssl_cert.clone()).unwrap(),
})
}
@ -27,3 +49,109 @@ pub async fn refresh_worker_extension(State(state): State<ServerState>) -> &'sta
.unwrap();
"OK"
}
#[derive(Debug, serde::Deserialize)]
pub struct Params {
pub id: String,
}
/// Download a file from the server
/// Sample wget command: wget --header="Authorization: TOKEN" --no-check-certificate --content-disposition https://localhost:9559/download-file
pub async fn download_file(
Query(params): Query<Params>,
State(state): State<ServerState>,
headers: HeaderMap,
) -> impl IntoResponse {
println!("download_file");
println!("params: {:?}", params);
// read authorization header
let auth_header = headers.get("Authorization");
// let file_id = headers.get("file_id");
let auth_header_str = if let Some(auth_header) = auth_header {
auth_header.to_str().unwrap()
} else {
return (StatusCode::FORBIDDEN, "Forbidden").into_response();
};
let file_transfer_state = state.app_handle.state::<FileTransferState>();
// !The mutex guard must be dropped in a separate scope before the await below: std::sync::MutexGuard is not Send, so holding it across tokio::fs::File::open's await point would make this handler's future non-Send.
let file_path = {
let buckets = file_transfer_state
.buckets
.lock()
.expect("Failed to get buckets mutex");
let bucket = match buckets.get(auth_header_str) {
Some(b) => b,
None => return (StatusCode::NOT_FOUND, "File not found").into_response(),
};
let file_path = match bucket.id_path_map.get(&params.id) {
Some(f) => f,
None => return (StatusCode::NOT_FOUND, "File not found").into_response(),
};
file_path.clone()
};
let app_handle = state.app_handle.clone();
let response = match File::open(&file_path).await {
Ok(file) => {
let total_size = file.metadata().await.map(|m| m.len()).unwrap_or(0);
let stream = ReaderStream::new(file);
let mut sent_bytes = 0u64;
let mut chunk_count = 0;
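// Wrap the byte stream so each chunk advances the sent-byte counter and progress is
// periodically reported to the frontend while the file is served.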
let progress_stream = stream.map(move |chunk| {
if let Ok(ref chunk_data) = chunk {
sent_bytes += chunk_data.len() as u64;
// each chunk is roughly 4 KB (ReaderStream's default buffer size)
chunk_count += 1;
// Only emit progress every 10 chunks
if chunk_count % 10 == 0 {
// Emit progress to the frontend
let progress = (sent_bytes as f64 / total_size as f64) * 100.0;
println!(
"progress: {} ({}/{} bytes, {} chunks)",
progress, sent_bytes, total_size, chunk_count
);
tauri::async_runtime::spawn({
let app_handle = app_handle.clone();
async move {
app_handle
.emit(
"file-download-progress",
FileTransferProgressPayload {
progress,
sent_bytes,
total_size,
chunk_count,
},
)
.unwrap_or_else(|e| eprintln!("Error emitting event: {:?}", e));
}
});
}
}
chunk
});
let body = StreamBody::new(progress_stream);
(
[
(
header::CONTENT_TYPE,
mime_guess::from_path(&file_path)
.first_or_octet_stream()
.as_ref(),
),
(
header::CONTENT_DISPOSITION,
format!(
"attachment; filename={}",
file_path.file_name().unwrap().to_str().unwrap()
)
.as_str(),
),
],
body,
)
.into_response()
}
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
};
response
}

View File

@ -1,18 +1,25 @@
use crate::commands::discovery::Peers;
use crate::commands::discovery::{Peers, ServiceInfoMod};
use mdns_sd::ServiceEvent;
use std::collections::HashMap;
use sysinfo::System;
use tauri::{AppHandle, Manager, Runtime};
use tauri_plugin_network::network::mdns::MdnsService;
use uuid::Uuid;
pub fn setup_mdns(my_port: u16) -> anyhow::Result<MdnsService> {
pub fn setup_mdns(my_port: u16, public_rsa_key: String) -> anyhow::Result<MdnsService> {
let mdns = MdnsService::new("kunkun")?;
let id = Uuid::new_v4();
let mut properties: HashMap<String, String> = HashMap::new();
if let Some(hostname) = System::host_name() {
properties.insert("hostname".to_string(), hostname);
}
// TXT record properties appear to have a size limit, so the public key can't be included here
mdns.register(
&format!("desktop-{}", id),
&MdnsService::get_default_ips_str(),
my_port,
None,
None,
Some(properties),
)?;
Ok(mdns)
}
@ -29,10 +36,20 @@ pub fn handle_mdns_service_evt<R: Runtime>(
// log::info!("Service Resolved: {:?}", info);
// },
ServiceEvent::ServiceResolved(info) => {
log::info!("Service Resolved: {:?}", info);
app_handle.state::<Peers>().add_peer(info.into());
let peers = app_handle.state::<Peers>().peers.lock().unwrap().clone();
log::info!("Peers: {:?}", peers);
log::info!("Service Resolved: {:#?}", info);
match ServiceInfoMod::from(info).await {
Ok(service_info) => {
app_handle.state::<Peers>().add_peer(service_info).await;
if let Ok(peers) = app_handle.state::<Peers>().peers.lock() {
log::info!("Peers: {:#?}", peers.clone());
} else {
log::error!("Failed to acquire peers lock");
}
}
Err(e) => {
log::error!("Failed to create ServiceInfoMod: {}", e);
}
}
}
ServiceEvent::ServiceRemoved(service_type, fullname) => {
log::info!("Service Removed: {:?} {:?}", service_type, fullname);

View File

@ -1,13 +1,12 @@
use applications::utils::image::{self, RustImage, RustImageData};
use applications::utils::image::{RustImage, RustImageData};
use std::{
ffi::OsStr,
fs::File,
io::{BufReader, Cursor, Write},
path::{Path, PathBuf},
io::{BufReader, Cursor},
path::PathBuf,
};
#[cfg(target_os = "macos")]
use tauri_icns::{IconFamily, IconType};
use uuid::Uuid;
#[cfg(target_os = "macos")]
/// Load Apple icns

View File

@ -4,7 +4,9 @@ pub mod icns;
pub mod manifest;
pub mod path;
pub mod plist;
pub mod reqwest;
pub mod script;
pub mod settings;
pub mod setup;
pub mod time;
pub mod transfer_stats;

Some files were not shown because too many files have changed in this diff.