mirror of
https://github.com/samsonjs/vibetunnel.git
synced 2026-04-05 11:15:57 +00:00
feat(tauri): Major refactoring with enhanced features and managers
- Add comprehensive manager system for various features: - Notification manager for in-app notifications - Permission manager for system permissions - Update manager for app updates - Backend manager for server backend management - Debug features manager for debugging tools - API testing manager for API test suites - Auth cache manager for credential caching - Terminal integrations manager for terminal emulator support - Session monitor for tracking active sessions - Port conflict resolver for port management - Network utilities for network information - TTY forward manager for TTY forwarding - Cast manager for terminal recording - App mover for macOS app location management - Terminal spawn service for launching terminals - File system API for file operations - Add settings UI pages (settings.html, server-console.html) - Update tauri.conf.json with new configuration - Enhance server implementation with better state management - Add comprehensive command system for all managers - Update dependencies in Cargo.toml - Add welcome screen manager for onboarding - Implement proper state management across all components
This commit is contained in:
parent
18f724ae22
commit
2b5060e75f
33 changed files with 14296 additions and 43 deletions
233
tauri/package-lock.json
generated
Normal file
233
tauri/package-lock.json
generated
Normal file
|
|
@ -0,0 +1,233 @@
|
|||
{
|
||||
"name": "vibetunnel-tauri",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "vibetunnel-tauri",
|
||||
"version": "1.0.0",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"@tauri-apps/cli": "^2.0.0-rc.18"
|
||||
}
|
||||
},
|
||||
"node_modules/@tauri-apps/cli": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@tauri-apps/cli/-/cli-2.5.0.tgz",
|
||||
"integrity": "sha512-rAtHqG0Gh/IWLjN2zTf3nZqYqbo81oMbqop56rGTjrlWk9pTTAjkqOjSL9XQLIMZ3RbeVjveCqqCA0s8RnLdMg==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0 OR MIT",
|
||||
"bin": {
|
||||
"tauri": "tauri.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/tauri"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@tauri-apps/cli-darwin-arm64": "2.5.0",
|
||||
"@tauri-apps/cli-darwin-x64": "2.5.0",
|
||||
"@tauri-apps/cli-linux-arm-gnueabihf": "2.5.0",
|
||||
"@tauri-apps/cli-linux-arm64-gnu": "2.5.0",
|
||||
"@tauri-apps/cli-linux-arm64-musl": "2.5.0",
|
||||
"@tauri-apps/cli-linux-riscv64-gnu": "2.5.0",
|
||||
"@tauri-apps/cli-linux-x64-gnu": "2.5.0",
|
||||
"@tauri-apps/cli-linux-x64-musl": "2.5.0",
|
||||
"@tauri-apps/cli-win32-arm64-msvc": "2.5.0",
|
||||
"@tauri-apps/cli-win32-ia32-msvc": "2.5.0",
|
||||
"@tauri-apps/cli-win32-x64-msvc": "2.5.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@tauri-apps/cli-darwin-arm64": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-2.5.0.tgz",
|
||||
"integrity": "sha512-VuVAeTFq86dfpoBDNYAdtQVLbP0+2EKCHIIhkaxjeoPARR0sLpFHz2zs0PcFU76e+KAaxtEtAJAXGNUc8E1PzQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"license": "Apache-2.0 OR MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"node_modules/@tauri-apps/cli-darwin-x64": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-2.5.0.tgz",
|
||||
"integrity": "sha512-hUF01sC06cZVa8+I0/VtsHOk9BbO75rd+YdtHJ48xTdcYaQ5QIwL4yZz9OR1AKBTaUYhBam8UX9Pvd5V2/4Dpw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"license": "Apache-2.0 OR MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"node_modules/@tauri-apps/cli-linux-arm-gnueabihf": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-2.5.0.tgz",
|
||||
"integrity": "sha512-LQKqttsK252LlqYyX8R02MinUsfFcy3+NZiJwHFgi5Y3+ZUIAED9cSxJkyNtuY5KMnR4RlpgWyLv4P6akN1xhg==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"dev": true,
|
||||
"license": "Apache-2.0 OR MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"node_modules/@tauri-apps/cli-linux-arm64-gnu": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-2.5.0.tgz",
|
||||
"integrity": "sha512-mTQufsPcpdHg5RW0zypazMo4L55EfeE5snTzrPqbLX4yCK2qalN7+rnP8O8GT06xhp6ElSP/Ku1M2MR297SByQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"license": "Apache-2.0 OR MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"node_modules/@tauri-apps/cli-linux-arm64-musl": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.5.0.tgz",
|
||||
"integrity": "sha512-rQO1HhRUQqyEaal5dUVOQruTRda/TD36s9kv1hTxZiFuSq3558lsTjAcUEnMAtBcBkps20sbyTJNMT0AwYIk8Q==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"license": "Apache-2.0 OR MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"node_modules/@tauri-apps/cli-linux-riscv64-gnu": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-riscv64-gnu/-/cli-linux-riscv64-gnu-2.5.0.tgz",
|
||||
"integrity": "sha512-7oS18FN46yDxyw1zX/AxhLAd7T3GrLj3Ai6s8hZKd9qFVzrAn36ESL7d3G05s8wEtsJf26qjXnVF4qleS3dYsA==",
|
||||
"cpu": [
|
||||
"riscv64"
|
||||
],
|
||||
"dev": true,
|
||||
"license": "Apache-2.0 OR MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"node_modules/@tauri-apps/cli-linux-x64-gnu": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-2.5.0.tgz",
|
||||
"integrity": "sha512-SG5sFNL7VMmDBdIg3nO3EzNRT306HsiEQ0N90ILe3ZABYAVoPDO/ttpCO37ApLInTzrq/DLN+gOlC/mgZvLw1w==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"license": "Apache-2.0 OR MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"node_modules/@tauri-apps/cli-linux-x64-musl": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-2.5.0.tgz",
|
||||
"integrity": "sha512-QXDM8zp/6v05PNWju5ELsVwF0VH1n6b5pk2E6W/jFbbiwz80Vs1lACl9pv5kEHkrxBj+aWU/03JzGuIj2g3SkQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"license": "Apache-2.0 OR MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"node_modules/@tauri-apps/cli-win32-arm64-msvc": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-2.5.0.tgz",
|
||||
"integrity": "sha512-pFSHFK6b+o9y4Un8w0gGLwVyFTZaC3P0kQ7umRt/BLDkzD5RnQ4vBM7CF8BCU5nkwmEBUCZd7Wt3TWZxe41o6Q==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"license": "Apache-2.0 OR MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"node_modules/@tauri-apps/cli-win32-ia32-msvc": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-2.5.0.tgz",
|
||||
"integrity": "sha512-EArv1IaRlogdLAQyGlKmEqZqm5RfHCUMhJoedWu7GtdbOMUfSAz6FMX2boE1PtEmNO4An+g188flLeVErrxEKg==",
|
||||
"cpu": [
|
||||
"ia32"
|
||||
],
|
||||
"dev": true,
|
||||
"license": "Apache-2.0 OR MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"node_modules/@tauri-apps/cli-win32-x64-msvc": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-2.5.0.tgz",
|
||||
"integrity": "sha512-lj43EFYbnAta8pd9JnUq87o+xRUR0odz+4rixBtTUwUgdRdwQ2V9CzFtsMu6FQKpFQ6mujRK6P1IEwhL6ADRsQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"license": "Apache-2.0 OR MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
634
tauri/public/server-console.html
Normal file
634
tauri/public/server-console.html
Normal file
|
|
@ -0,0 +1,634 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Server Console - VibeTunnel</title>
|
||||
<style>
|
||||
:root {
|
||||
/* Light mode colors */
|
||||
--bg-color: #f5f5f7;
|
||||
--window-bg: #ffffff;
|
||||
--text-primary: #1d1d1f;
|
||||
--text-secondary: #86868b;
|
||||
--text-tertiary: #c7c7cc;
|
||||
--accent-color: #007aff;
|
||||
--accent-hover: #0051d5;
|
||||
--border-color: rgba(0, 0, 0, 0.1);
|
||||
--shadow-color: rgba(0, 0, 0, 0.1);
|
||||
--console-bg: #1e1e1e;
|
||||
--console-text: #d4d4d4;
|
||||
--console-info: #3794ff;
|
||||
--console-success: #4ec9b0;
|
||||
--console-warning: #ce9178;
|
||||
--console-error: #f48771;
|
||||
--console-debug: #b5cea8;
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
:root {
|
||||
/* Dark mode colors */
|
||||
--bg-color: #000000;
|
||||
--window-bg: #1c1c1e;
|
||||
--text-primary: #f5f5f7;
|
||||
--text-secondary: #98989d;
|
||||
--text-tertiary: #48484a;
|
||||
--accent-color: #0a84ff;
|
||||
--accent-hover: #409cff;
|
||||
--border-color: rgba(255, 255, 255, 0.1);
|
||||
--shadow-color: rgba(0, 0, 0, 0.5);
|
||||
--console-bg: #0e0e0e;
|
||||
}
|
||||
}
|
||||
|
||||
* {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'SF Pro Display', 'Segoe UI', system-ui, sans-serif;
|
||||
background-color: var(--bg-color);
|
||||
color: var(--text-primary);
|
||||
width: 100vw;
|
||||
height: 100vh;
|
||||
overflow: hidden;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
/* Header */
|
||||
.header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: 12px 16px;
|
||||
background-color: var(--window-bg);
|
||||
border-bottom: 1px solid var(--border-color);
|
||||
}
|
||||
|
||||
.header-title {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.status-indicator {
|
||||
width: 8px;
|
||||
height: 8px;
|
||||
border-radius: 50%;
|
||||
background-color: var(--console-success);
|
||||
animation: pulse 2s ease-in-out infinite;
|
||||
}
|
||||
|
||||
.status-indicator.stopped {
|
||||
background-color: var(--console-error);
|
||||
animation: none;
|
||||
}
|
||||
|
||||
@keyframes pulse {
|
||||
0%, 100% { opacity: 1; }
|
||||
50% { opacity: 0.5; }
|
||||
}
|
||||
|
||||
.header-controls {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.button {
|
||||
padding: 6px 12px;
|
||||
font-size: 12px;
|
||||
font-weight: 500;
|
||||
border: none;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
transition: all 0.2s ease;
|
||||
background-color: var(--accent-color);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.button:hover {
|
||||
background-color: var(--accent-hover);
|
||||
}
|
||||
|
||||
.button:active {
|
||||
transform: scale(0.98);
|
||||
}
|
||||
|
||||
.button.secondary {
|
||||
background-color: transparent;
|
||||
color: var(--accent-color);
|
||||
border: 1px solid var(--accent-color);
|
||||
}
|
||||
|
||||
.button.secondary:hover {
|
||||
background-color: var(--accent-color);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.button:disabled {
|
||||
opacity: 0.5;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
/* Filter Bar */
|
||||
.filter-bar {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 12px;
|
||||
padding: 8px 16px;
|
||||
background-color: var(--window-bg);
|
||||
border-bottom: 1px solid var(--border-color);
|
||||
}
|
||||
|
||||
.search-input {
|
||||
flex: 1;
|
||||
padding: 6px 12px;
|
||||
font-size: 13px;
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 4px;
|
||||
background-color: var(--bg-color);
|
||||
color: var(--text-primary);
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.search-input:focus {
|
||||
border-color: var(--accent-color);
|
||||
box-shadow: 0 0 0 2px rgba(0, 122, 255, 0.1);
|
||||
}
|
||||
|
||||
.filter-buttons {
|
||||
display: flex;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.filter-button {
|
||||
padding: 4px 8px;
|
||||
font-size: 11px;
|
||||
font-weight: 500;
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 4px;
|
||||
background-color: transparent;
|
||||
color: var(--text-secondary);
|
||||
cursor: pointer;
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
|
||||
.filter-button:hover {
|
||||
background-color: var(--bg-color);
|
||||
}
|
||||
|
||||
.filter-button.active {
|
||||
background-color: var(--accent-color);
|
||||
color: white;
|
||||
border-color: var(--accent-color);
|
||||
}
|
||||
|
||||
/* Console */
|
||||
.console-container {
|
||||
flex: 1;
|
||||
background-color: var(--console-bg);
|
||||
overflow: hidden;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.console {
|
||||
flex: 1;
|
||||
padding: 12px 16px;
|
||||
overflow-y: auto;
|
||||
font-family: 'SF Mono', Monaco, 'Cascadia Code', monospace;
|
||||
font-size: 12px;
|
||||
line-height: 1.5;
|
||||
color: var(--console-text);
|
||||
-webkit-user-select: text;
|
||||
user-select: text;
|
||||
}
|
||||
|
||||
/* Custom scrollbar */
|
||||
.console::-webkit-scrollbar {
|
||||
width: 8px;
|
||||
}
|
||||
|
||||
.console::-webkit-scrollbar-track {
|
||||
background: rgba(255, 255, 255, 0.05);
|
||||
}
|
||||
|
||||
.console::-webkit-scrollbar-thumb {
|
||||
background: rgba(255, 255, 255, 0.2);
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
.console::-webkit-scrollbar-thumb:hover {
|
||||
background: rgba(255, 255, 255, 0.3);
|
||||
}
|
||||
|
||||
/* Log entries */
|
||||
.log-entry {
|
||||
margin-bottom: 2px;
|
||||
padding: 2px 0;
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
gap: 8px;
|
||||
opacity: 0;
|
||||
animation: fadeIn 0.2s ease-out forwards;
|
||||
}
|
||||
|
||||
@keyframes fadeIn {
|
||||
to { opacity: 1; }
|
||||
}
|
||||
|
||||
.log-timestamp {
|
||||
color: var(--text-tertiary);
|
||||
flex-shrink: 0;
|
||||
font-size: 11px;
|
||||
}
|
||||
|
||||
.log-level {
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
font-size: 10px;
|
||||
padding: 2px 6px;
|
||||
border-radius: 3px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.log-level.trace {
|
||||
color: var(--console-debug);
|
||||
background-color: rgba(181, 206, 168, 0.1);
|
||||
}
|
||||
|
||||
.log-level.debug {
|
||||
color: var(--console-debug);
|
||||
background-color: rgba(181, 206, 168, 0.1);
|
||||
}
|
||||
|
||||
.log-level.info {
|
||||
color: var(--console-info);
|
||||
background-color: rgba(55, 148, 255, 0.1);
|
||||
}
|
||||
|
||||
.log-level.warn {
|
||||
color: var(--console-warning);
|
||||
background-color: rgba(206, 145, 120, 0.1);
|
||||
}
|
||||
|
||||
.log-level.error {
|
||||
color: var(--console-error);
|
||||
background-color: rgba(244, 135, 113, 0.1);
|
||||
}
|
||||
|
||||
.log-level.success {
|
||||
color: var(--console-success);
|
||||
background-color: rgba(78, 201, 176, 0.1);
|
||||
}
|
||||
|
||||
.log-message {
|
||||
flex: 1;
|
||||
word-wrap: break-word;
|
||||
}
|
||||
|
||||
.log-entry.hidden {
|
||||
display: none;
|
||||
}
|
||||
|
||||
/* Footer */
|
||||
.footer {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: 8px 16px;
|
||||
background-color: var(--window-bg);
|
||||
border-top: 1px solid var(--border-color);
|
||||
font-size: 11px;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.log-stats {
|
||||
display: flex;
|
||||
gap: 16px;
|
||||
}
|
||||
|
||||
.stat-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.stat-count {
|
||||
font-weight: 600;
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
/* Empty state */
|
||||
.empty-state {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 100%;
|
||||
color: var(--text-tertiary);
|
||||
font-size: 14px;
|
||||
text-align: center;
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.empty-icon {
|
||||
width: 48px;
|
||||
height: 48px;
|
||||
margin-bottom: 16px;
|
||||
opacity: 0.3;
|
||||
}
|
||||
|
||||
/* Loading state */
|
||||
.loading {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 100%;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.spinner {
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
border: 2px solid var(--border-color);
|
||||
border-top-color: var(--accent-color);
|
||||
border-radius: 50%;
|
||||
animation: spin 0.8s linear infinite;
|
||||
margin-right: 12px;
|
||||
}
|
||||
|
||||
@keyframes spin {
|
||||
to { transform: rotate(360deg); }
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="header">
|
||||
<div class="header-title">
|
||||
<div class="status-indicator" id="statusIndicator"></div>
|
||||
<span>Server Console</span>
|
||||
<span id="serverInfo" style="color: var(--text-secondary); font-weight: normal;">Port 4020</span>
|
||||
</div>
|
||||
<div class="header-controls">
|
||||
<button class="button secondary" onclick="clearConsole()">Clear</button>
|
||||
<button class="button secondary" onclick="exportLogs()">Export</button>
|
||||
<button class="button secondary" onclick="toggleAutoScroll()" id="autoScrollBtn">Auto-scroll: ON</button>
|
||||
<button class="button" onclick="toggleServer()" id="serverToggleBtn">Stop Server</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="filter-bar">
|
||||
<input type="text" class="search-input" placeholder="Search logs..." id="searchInput" oninput="filterLogs()">
|
||||
<div class="filter-buttons">
|
||||
<button class="filter-button active" data-level="all" onclick="setLogFilter('all')">All</button>
|
||||
<button class="filter-button" data-level="error" onclick="setLogFilter('error')">Errors</button>
|
||||
<button class="filter-button" data-level="warn" onclick="setLogFilter('warn')">Warnings</button>
|
||||
<button class="filter-button" data-level="info" onclick="setLogFilter('info')">Info</button>
|
||||
<button class="filter-button" data-level="debug" onclick="setLogFilter('debug')">Debug</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="console-container">
|
||||
<div class="console" id="console">
|
||||
<div class="loading" id="loadingState">
|
||||
<div class="spinner"></div>
|
||||
<span>Connecting to server...</span>
|
||||
</div>
|
||||
<div class="empty-state" id="emptyState" style="display: none;">
|
||||
<svg class="empty-icon" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z"></path>
|
||||
</svg>
|
||||
<p>No logs yet</p>
|
||||
<p style="font-size: 12px; margin-top: 8px;">Server logs will appear here when activity occurs</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="footer">
|
||||
<div class="log-stats">
|
||||
<div class="stat-item">
|
||||
<span>Total:</span>
|
||||
<span class="stat-count" id="totalCount">0</span>
|
||||
</div>
|
||||
<div class="stat-item">
|
||||
<span>Errors:</span>
|
||||
<span class="stat-count" id="errorCount">0</span>
|
||||
</div>
|
||||
<div class="stat-item">
|
||||
<span>Warnings:</span>
|
||||
<span class="stat-count" id="warnCount">0</span>
|
||||
</div>
|
||||
</div>
|
||||
<div id="connectionStatus">Connected</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
const { invoke } = window.__TAURI__.tauri;
|
||||
const { appWindow } = window.__TAURI__.window;
|
||||
const { open } = window.__TAURI__.shell;
|
||||
|
||||
let logs = [];
|
||||
let autoScroll = true;
|
||||
let currentFilter = 'all';
|
||||
let searchTerm = '';
|
||||
let isServerRunning = true;
|
||||
let updateInterval;
|
||||
|
||||
// Initialize
|
||||
async function init() {
|
||||
await loadServerStatus();
|
||||
await loadLogs();
|
||||
|
||||
// Start periodic updates
|
||||
updateInterval = setInterval(async () => {
|
||||
await loadServerStatus();
|
||||
await loadLogs();
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
// Load server status
|
||||
async function loadServerStatus() {
|
||||
try {
|
||||
const status = await invoke('get_server_status');
|
||||
isServerRunning = status.running;
|
||||
|
||||
const indicator = document.getElementById('statusIndicator');
|
||||
const toggleBtn = document.getElementById('serverToggleBtn');
|
||||
const serverInfo = document.getElementById('serverInfo');
|
||||
|
||||
if (isServerRunning) {
|
||||
indicator.classList.remove('stopped');
|
||||
toggleBtn.textContent = 'Stop Server';
|
||||
serverInfo.textContent = `Port ${status.port}`;
|
||||
} else {
|
||||
indicator.classList.add('stopped');
|
||||
toggleBtn.textContent = 'Start Server';
|
||||
serverInfo.textContent = 'Stopped';
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to load server status:', error);
|
||||
}
|
||||
}
|
||||
|
||||
// Load logs
|
||||
async function loadLogs() {
|
||||
try {
|
||||
const newLogs = await invoke('get_server_logs', { limit: 1000 });
|
||||
|
||||
// Hide loading state
|
||||
document.getElementById('loadingState').style.display = 'none';
|
||||
|
||||
if (newLogs.length === 0 && logs.length === 0) {
|
||||
document.getElementById('emptyState').style.display = 'flex';
|
||||
return;
|
||||
} else {
|
||||
document.getElementById('emptyState').style.display = 'none';
|
||||
}
|
||||
|
||||
// Check if there are new logs
|
||||
if (newLogs.length > logs.length) {
|
||||
logs = newLogs;
|
||||
renderLogs();
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to load logs:', error);
|
||||
document.getElementById('connectionStatus').textContent = 'Disconnected';
|
||||
}
|
||||
}
|
||||
|
||||
// Render logs
|
||||
function renderLogs() {
|
||||
const console = document.getElementById('console');
|
||||
const wasAtBottom = console.scrollHeight - console.scrollTop === console.clientHeight;
|
||||
|
||||
// Clear existing logs
|
||||
console.innerHTML = '';
|
||||
|
||||
// Apply filters
|
||||
let filteredLogs = logs;
|
||||
|
||||
if (currentFilter !== 'all') {
|
||||
filteredLogs = logs.filter(log => log.level.toLowerCase() === currentFilter);
|
||||
}
|
||||
|
||||
if (searchTerm) {
|
||||
filteredLogs = filteredLogs.filter(log =>
|
||||
log.message.toLowerCase().includes(searchTerm.toLowerCase())
|
||||
);
|
||||
}
|
||||
|
||||
// Render filtered logs
|
||||
filteredLogs.forEach(log => {
|
||||
const entry = createLogEntry(log);
|
||||
console.appendChild(entry);
|
||||
});
|
||||
|
||||
// Update stats
|
||||
updateStats();
|
||||
|
||||
// Auto-scroll if enabled and was at bottom
|
||||
if (autoScroll && wasAtBottom) {
|
||||
console.scrollTop = console.scrollHeight;
|
||||
}
|
||||
}
|
||||
|
||||
// Create log entry element
|
||||
function createLogEntry(log) {
|
||||
const entry = document.createElement('div');
|
||||
entry.className = 'log-entry';
|
||||
|
||||
const timestamp = document.createElement('span');
|
||||
timestamp.className = 'log-timestamp';
|
||||
timestamp.textContent = new Date(log.timestamp).toLocaleTimeString();
|
||||
|
||||
const level = document.createElement('span');
|
||||
level.className = `log-level ${log.level.toLowerCase()}`;
|
||||
level.textContent = log.level;
|
||||
|
||||
const message = document.createElement('span');
|
||||
message.className = 'log-message';
|
||||
message.textContent = log.message;
|
||||
|
||||
entry.appendChild(timestamp);
|
||||
entry.appendChild(level);
|
||||
entry.appendChild(message);
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
// Update statistics
|
||||
function updateStats() {
|
||||
document.getElementById('totalCount').textContent = logs.length;
|
||||
document.getElementById('errorCount').textContent = logs.filter(l => l.level === 'error').length;
|
||||
document.getElementById('warnCount').textContent = logs.filter(l => l.level === 'warn').length;
|
||||
}
|
||||
|
||||
// Filter logs by level
|
||||
function setLogFilter(level) {
|
||||
currentFilter = level;
|
||||
|
||||
// Update button states
|
||||
document.querySelectorAll('.filter-button').forEach(btn => {
|
||||
btn.classList.toggle('active', btn.dataset.level === level);
|
||||
});
|
||||
|
||||
renderLogs();
|
||||
}
|
||||
|
||||
// Filter logs by search term
|
||||
function filterLogs() {
|
||||
searchTerm = document.getElementById('searchInput').value;
|
||||
renderLogs();
|
||||
}
|
||||
|
||||
// Clear console
|
||||
function clearConsole() {
|
||||
logs = [];
|
||||
renderLogs();
|
||||
}
|
||||
|
||||
// Export logs
|
||||
async function exportLogs() {
|
||||
try {
|
||||
await invoke('export_logs');
|
||||
} catch (error) {
|
||||
console.error('Failed to export logs:', error);
|
||||
}
|
||||
}
|
||||
|
||||
// Toggle auto-scroll
|
||||
function toggleAutoScroll() {
|
||||
autoScroll = !autoScroll;
|
||||
document.getElementById('autoScrollBtn').textContent = `Auto-scroll: ${autoScroll ? 'ON' : 'OFF'}`;
|
||||
}
|
||||
|
||||
// Toggle server
|
||||
async function toggleServer() {
|
||||
try {
|
||||
if (isServerRunning) {
|
||||
await invoke('stop_server');
|
||||
} else {
|
||||
await invoke('start_server');
|
||||
}
|
||||
await loadServerStatus();
|
||||
} catch (error) {
|
||||
console.error('Failed to toggle server:', error);
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup on window close
|
||||
appWindow.onCloseRequested(async () => {
|
||||
clearInterval(updateInterval);
|
||||
});
|
||||
|
||||
// Start the app
|
||||
init();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
1286
tauri/public/settings.html
Normal file
1286
tauri/public/settings.html
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -82,13 +82,20 @@ reqwest = { version = "0.12", features = ["json"] }
|
|||
base64 = "0.22"
|
||||
sha2 = "0.10"
|
||||
|
||||
# Debug features
|
||||
num_cpus = "1"
|
||||
|
||||
# Network utilities
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
nix = { version = "0.27", features = ["net"] }
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
ipconfig = "0.3"
|
||||
windows = { version = "0.58", features = ["Win32_Foundation", "Win32_Security", "Win32_System_Threading", "Win32_UI_WindowsAndMessaging"] }
|
||||
|
||||
[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies]
|
||||
tauri-plugin-single-instance = "2.0.1"
|
||||
|
||||
# Platform-specific dependencies
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
windows = { version = "0.58", features = ["Win32_Foundation", "Win32_Security", "Win32_System_Threading", "Win32_UI_WindowsAndMessaging"] }
|
||||
|
||||
[profile.release]
|
||||
panic = "abort"
|
||||
codegen-units = 1
|
||||
|
|
|
|||
BIN
tauri/src-tauri/public/icon.png
Normal file
BIN
tauri/src-tauri/public/icon.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 954 KiB |
634
tauri/src-tauri/public/server-console.html
Normal file
634
tauri/src-tauri/public/server-console.html
Normal file
|
|
@ -0,0 +1,634 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Server Console - VibeTunnel</title>
|
||||
<style>
|
||||
:root {
|
||||
/* Light mode colors */
|
||||
--bg-color: #f5f5f7;
|
||||
--window-bg: #ffffff;
|
||||
--text-primary: #1d1d1f;
|
||||
--text-secondary: #86868b;
|
||||
--text-tertiary: #c7c7cc;
|
||||
--accent-color: #007aff;
|
||||
--accent-hover: #0051d5;
|
||||
--border-color: rgba(0, 0, 0, 0.1);
|
||||
--shadow-color: rgba(0, 0, 0, 0.1);
|
||||
--console-bg: #1e1e1e;
|
||||
--console-text: #d4d4d4;
|
||||
--console-info: #3794ff;
|
||||
--console-success: #4ec9b0;
|
||||
--console-warning: #ce9178;
|
||||
--console-error: #f48771;
|
||||
--console-debug: #b5cea8;
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
:root {
|
||||
/* Dark mode colors */
|
||||
--bg-color: #000000;
|
||||
--window-bg: #1c1c1e;
|
||||
--text-primary: #f5f5f7;
|
||||
--text-secondary: #98989d;
|
||||
--text-tertiary: #48484a;
|
||||
--accent-color: #0a84ff;
|
||||
--accent-hover: #409cff;
|
||||
--border-color: rgba(255, 255, 255, 0.1);
|
||||
--shadow-color: rgba(0, 0, 0, 0.5);
|
||||
--console-bg: #0e0e0e;
|
||||
}
|
||||
}
|
||||
|
||||
* {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'SF Pro Display', 'Segoe UI', system-ui, sans-serif;
|
||||
background-color: var(--bg-color);
|
||||
color: var(--text-primary);
|
||||
width: 100vw;
|
||||
height: 100vh;
|
||||
overflow: hidden;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
/* Header */
|
||||
.header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: 12px 16px;
|
||||
background-color: var(--window-bg);
|
||||
border-bottom: 1px solid var(--border-color);
|
||||
}
|
||||
|
||||
.header-title {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.status-indicator {
|
||||
width: 8px;
|
||||
height: 8px;
|
||||
border-radius: 50%;
|
||||
background-color: var(--console-success);
|
||||
animation: pulse 2s ease-in-out infinite;
|
||||
}
|
||||
|
||||
.status-indicator.stopped {
|
||||
background-color: var(--console-error);
|
||||
animation: none;
|
||||
}
|
||||
|
||||
@keyframes pulse {
|
||||
0%, 100% { opacity: 1; }
|
||||
50% { opacity: 0.5; }
|
||||
}
|
||||
|
||||
.header-controls {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.button {
|
||||
padding: 6px 12px;
|
||||
font-size: 12px;
|
||||
font-weight: 500;
|
||||
border: none;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
transition: all 0.2s ease;
|
||||
background-color: var(--accent-color);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.button:hover {
|
||||
background-color: var(--accent-hover);
|
||||
}
|
||||
|
||||
.button:active {
|
||||
transform: scale(0.98);
|
||||
}
|
||||
|
||||
.button.secondary {
|
||||
background-color: transparent;
|
||||
color: var(--accent-color);
|
||||
border: 1px solid var(--accent-color);
|
||||
}
|
||||
|
||||
.button.secondary:hover {
|
||||
background-color: var(--accent-color);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.button:disabled {
|
||||
opacity: 0.5;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
/* Filter Bar */
|
||||
.filter-bar {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 12px;
|
||||
padding: 8px 16px;
|
||||
background-color: var(--window-bg);
|
||||
border-bottom: 1px solid var(--border-color);
|
||||
}
|
||||
|
||||
.search-input {
|
||||
flex: 1;
|
||||
padding: 6px 12px;
|
||||
font-size: 13px;
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 4px;
|
||||
background-color: var(--bg-color);
|
||||
color: var(--text-primary);
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.search-input:focus {
|
||||
border-color: var(--accent-color);
|
||||
box-shadow: 0 0 0 2px rgba(0, 122, 255, 0.1);
|
||||
}
|
||||
|
||||
.filter-buttons {
|
||||
display: flex;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.filter-button {
|
||||
padding: 4px 8px;
|
||||
font-size: 11px;
|
||||
font-weight: 500;
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 4px;
|
||||
background-color: transparent;
|
||||
color: var(--text-secondary);
|
||||
cursor: pointer;
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
|
||||
.filter-button:hover {
|
||||
background-color: var(--bg-color);
|
||||
}
|
||||
|
||||
.filter-button.active {
|
||||
background-color: var(--accent-color);
|
||||
color: white;
|
||||
border-color: var(--accent-color);
|
||||
}
|
||||
|
||||
/* Console */
|
||||
.console-container {
|
||||
flex: 1;
|
||||
background-color: var(--console-bg);
|
||||
overflow: hidden;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.console {
|
||||
flex: 1;
|
||||
padding: 12px 16px;
|
||||
overflow-y: auto;
|
||||
font-family: 'SF Mono', Monaco, 'Cascadia Code', monospace;
|
||||
font-size: 12px;
|
||||
line-height: 1.5;
|
||||
color: var(--console-text);
|
||||
-webkit-user-select: text;
|
||||
user-select: text;
|
||||
}
|
||||
|
||||
/* Custom scrollbar */
|
||||
.console::-webkit-scrollbar {
|
||||
width: 8px;
|
||||
}
|
||||
|
||||
.console::-webkit-scrollbar-track {
|
||||
background: rgba(255, 255, 255, 0.05);
|
||||
}
|
||||
|
||||
.console::-webkit-scrollbar-thumb {
|
||||
background: rgba(255, 255, 255, 0.2);
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
.console::-webkit-scrollbar-thumb:hover {
|
||||
background: rgba(255, 255, 255, 0.3);
|
||||
}
|
||||
|
||||
/* Log entries */
|
||||
.log-entry {
|
||||
margin-bottom: 2px;
|
||||
padding: 2px 0;
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
gap: 8px;
|
||||
opacity: 0;
|
||||
animation: fadeIn 0.2s ease-out forwards;
|
||||
}
|
||||
|
||||
@keyframes fadeIn {
|
||||
to { opacity: 1; }
|
||||
}
|
||||
|
||||
.log-timestamp {
|
||||
color: var(--text-tertiary);
|
||||
flex-shrink: 0;
|
||||
font-size: 11px;
|
||||
}
|
||||
|
||||
.log-level {
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
font-size: 10px;
|
||||
padding: 2px 6px;
|
||||
border-radius: 3px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.log-level.trace {
|
||||
color: var(--console-debug);
|
||||
background-color: rgba(181, 206, 168, 0.1);
|
||||
}
|
||||
|
||||
.log-level.debug {
|
||||
color: var(--console-debug);
|
||||
background-color: rgba(181, 206, 168, 0.1);
|
||||
}
|
||||
|
||||
.log-level.info {
|
||||
color: var(--console-info);
|
||||
background-color: rgba(55, 148, 255, 0.1);
|
||||
}
|
||||
|
||||
.log-level.warn {
|
||||
color: var(--console-warning);
|
||||
background-color: rgba(206, 145, 120, 0.1);
|
||||
}
|
||||
|
||||
.log-level.error {
|
||||
color: var(--console-error);
|
||||
background-color: rgba(244, 135, 113, 0.1);
|
||||
}
|
||||
|
||||
.log-level.success {
|
||||
color: var(--console-success);
|
||||
background-color: rgba(78, 201, 176, 0.1);
|
||||
}
|
||||
|
||||
.log-message {
|
||||
flex: 1;
|
||||
word-wrap: break-word;
|
||||
}
|
||||
|
||||
.log-entry.hidden {
|
||||
display: none;
|
||||
}
|
||||
|
||||
/* Footer */
|
||||
.footer {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: 8px 16px;
|
||||
background-color: var(--window-bg);
|
||||
border-top: 1px solid var(--border-color);
|
||||
font-size: 11px;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.log-stats {
|
||||
display: flex;
|
||||
gap: 16px;
|
||||
}
|
||||
|
||||
.stat-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.stat-count {
|
||||
font-weight: 600;
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
/* Empty state */
|
||||
.empty-state {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 100%;
|
||||
color: var(--text-tertiary);
|
||||
font-size: 14px;
|
||||
text-align: center;
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.empty-icon {
|
||||
width: 48px;
|
||||
height: 48px;
|
||||
margin-bottom: 16px;
|
||||
opacity: 0.3;
|
||||
}
|
||||
|
||||
/* Loading state */
|
||||
.loading {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 100%;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.spinner {
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
border: 2px solid var(--border-color);
|
||||
border-top-color: var(--accent-color);
|
||||
border-radius: 50%;
|
||||
animation: spin 0.8s linear infinite;
|
||||
margin-right: 12px;
|
||||
}
|
||||
|
||||
@keyframes spin {
|
||||
to { transform: rotate(360deg); }
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="header">
|
||||
<div class="header-title">
|
||||
<div class="status-indicator" id="statusIndicator"></div>
|
||||
<span>Server Console</span>
|
||||
<span id="serverInfo" style="color: var(--text-secondary); font-weight: normal;">Port 4020</span>
|
||||
</div>
|
||||
<div class="header-controls">
|
||||
<button class="button secondary" onclick="clearConsole()">Clear</button>
|
||||
<button class="button secondary" onclick="exportLogs()">Export</button>
|
||||
<button class="button secondary" onclick="toggleAutoScroll()" id="autoScrollBtn">Auto-scroll: ON</button>
|
||||
<button class="button" onclick="toggleServer()" id="serverToggleBtn">Stop Server</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="filter-bar">
|
||||
<input type="text" class="search-input" placeholder="Search logs..." id="searchInput" oninput="filterLogs()">
|
||||
<div class="filter-buttons">
|
||||
<button class="filter-button active" data-level="all" onclick="setLogFilter('all')">All</button>
|
||||
<button class="filter-button" data-level="error" onclick="setLogFilter('error')">Errors</button>
|
||||
<button class="filter-button" data-level="warn" onclick="setLogFilter('warn')">Warnings</button>
|
||||
<button class="filter-button" data-level="info" onclick="setLogFilter('info')">Info</button>
|
||||
<button class="filter-button" data-level="debug" onclick="setLogFilter('debug')">Debug</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="console-container">
|
||||
<div class="console" id="console">
|
||||
<div class="loading" id="loadingState">
|
||||
<div class="spinner"></div>
|
||||
<span>Connecting to server...</span>
|
||||
</div>
|
||||
<div class="empty-state" id="emptyState" style="display: none;">
|
||||
<svg class="empty-icon" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z"></path>
|
||||
</svg>
|
||||
<p>No logs yet</p>
|
||||
<p style="font-size: 12px; margin-top: 8px;">Server logs will appear here when activity occurs</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="footer">
|
||||
<div class="log-stats">
|
||||
<div class="stat-item">
|
||||
<span>Total:</span>
|
||||
<span class="stat-count" id="totalCount">0</span>
|
||||
</div>
|
||||
<div class="stat-item">
|
||||
<span>Errors:</span>
|
||||
<span class="stat-count" id="errorCount">0</span>
|
||||
</div>
|
||||
<div class="stat-item">
|
||||
<span>Warnings:</span>
|
||||
<span class="stat-count" id="warnCount">0</span>
|
||||
</div>
|
||||
</div>
|
||||
<div id="connectionStatus">Connected</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
// Tauri API bindings pulled from the injected global.
// NOTE(review): Tauri v2 moved invoke to window.__TAURI__.core (v1 used .tauri);
// package-lock pins @tauri-apps/cli ^2.x — confirm this path against the bundled runtime.
const { invoke } = window.__TAURI__.tauri;
const { appWindow } = window.__TAURI__.window;
const { open } = window.__TAURI__.shell;  // NOTE(review): not referenced in this script — confirm intended

// In-memory copy of the server log entries last fetched from the backend.
let logs = [];
// Whether the console should stick to the newest entry after a re-render.
let autoScroll = true;
// Active level filter: 'all' or a lowercase level name ('error', 'warn', ...).
let currentFilter = 'all';
// Current free-text search string from the search input.
let searchTerm = '';
// Last known backend state; drives the Start/Stop toggle button.
let isServerRunning = true;
// Handle for the 1 s polling timer so it can be cleared on window close.
let updateInterval;
|
||||
|
||||
// Bootstrap the console: perform an immediate status/log refresh,
// then keep polling the backend once per second.
async function init() {
    const refresh = async () => {
        await loadServerStatus();
        await loadLogs();
    };

    await refresh();
    updateInterval = setInterval(refresh, 1000);
}
|
||||
|
||||
// Query the backend for server state and sync the header UI:
// the status dot, the Start/Stop button label, and the port text.
// Failures are logged and leave the UI untouched.
async function loadServerStatus() {
    try {
        const status = await invoke('get_server_status');
        isServerRunning = status.running;

        const indicator = document.getElementById('statusIndicator');
        const toggleBtn = document.getElementById('serverToggleBtn');
        const serverInfo = document.getElementById('serverInfo');

        indicator.classList.toggle('stopped', !isServerRunning);
        toggleBtn.textContent = isServerRunning ? 'Stop Server' : 'Start Server';
        serverInfo.textContent = isServerRunning ? `Port ${status.port}` : 'Stopped';
    } catch (error) {
        console.error('Failed to load server status:', error);
    }
}
|
||||
|
||||
// Fetch the latest server logs and re-render the console when they changed.
//
// Bug fix: the old change check (`newLogs.length > logs.length`) froze the
// view once the backend buffer reached the 1000-entry fetch cap — the length
// stays constant while entries rotate — and ignored shrinks after a server
// restart. We now re-render when the length differs OR the newest entry
// differs. Also restores the footer status to 'Connected' after a successful
// fetch (previously it could only flip to 'Disconnected', never back).
async function loadLogs() {
    try {
        const newLogs = await invoke('get_server_logs', { limit: 1000 });

        // First successful fetch: hide the connecting spinner.
        document.getElementById('loadingState').style.display = 'none';
        document.getElementById('connectionStatus').textContent = 'Connected';

        if (newLogs.length === 0 && logs.length === 0) {
            document.getElementById('emptyState').style.display = 'flex';
            return;
        }
        document.getElementById('emptyState').style.display = 'none';

        const last = (arr) => arr[arr.length - 1];
        const changed =
            newLogs.length !== logs.length ||
            (newLogs.length > 0 &&
                (last(newLogs).timestamp !== last(logs).timestamp ||
                 last(newLogs).message !== last(logs).message));

        if (changed) {
            logs = newLogs;
            renderLogs();
        }
    } catch (error) {
        console.error('Failed to load logs:', error);
        document.getElementById('connectionStatus').textContent = 'Disconnected';
    }
}
|
||||
|
||||
// Rebuild the console DOM from the current `logs`, applying the active
// level filter and search term, then refresh the footer counters.
// Renamed the local `console` (which shadowed the global) to `consoleEl`.
function renderLogs() {
    const consoleEl = document.getElementById('console');
    const wasAtBottom =
        consoleEl.scrollHeight - consoleEl.scrollTop === consoleEl.clientHeight;

    consoleEl.innerHTML = '';

    // Combined predicate: level filter first, then case-insensitive search.
    const needle = searchTerm.toLowerCase();
    const visible = logs.filter((log) => {
        if (currentFilter !== 'all' && log.level.toLowerCase() !== currentFilter) {
            return false;
        }
        return !searchTerm || log.message.toLowerCase().includes(needle);
    });

    for (const log of visible) {
        consoleEl.appendChild(createLogEntry(log));
    }

    updateStats();

    // Stay pinned to the newest entry only when auto-scroll is on and the
    // user had not scrolled up before the rebuild.
    if (autoScroll && wasAtBottom) {
        consoleEl.scrollTop = consoleEl.scrollHeight;
    }
}
|
||||
|
||||
// Build one .log-entry row — timestamp, level badge, message — for a
// single log record. The level also selects the badge's color class.
function createLogEntry(log) {
    const makeSpan = (className, text) => {
        const span = document.createElement('span');
        span.className = className;
        span.textContent = text;
        return span;
    };

    const entry = document.createElement('div');
    entry.className = 'log-entry';
    entry.appendChild(
        makeSpan('log-timestamp', new Date(log.timestamp).toLocaleTimeString())
    );
    entry.appendChild(makeSpan(`log-level ${log.level.toLowerCase()}`, log.level));
    entry.appendChild(makeSpan('log-message', log.message));
    return entry;
}
|
||||
|
||||
// Refresh the footer counters (total / errors / warnings).
// Consistency fix: renderLogs and createLogEntry both compare/display
// `log.level.toLowerCase()`, but this function compared the raw level
// (`l.level === 'error'`), so uppercase backend levels (e.g. 'ERROR')
// were never counted. Levels are now compared case-insensitively.
function updateStats() {
    const countLevel = (level) =>
        logs.filter((l) => l.level.toLowerCase() === level).length;

    document.getElementById('totalCount').textContent = logs.length;
    document.getElementById('errorCount').textContent = countLevel('error');
    document.getElementById('warnCount').textContent = countLevel('warn');
}
|
||||
|
||||
// Activate a level filter ('all', 'error', 'warn', 'info', 'debug'),
// highlight the matching filter button, and re-render the console.
function setLogFilter(level) {
    currentFilter = level;

    for (const btn of document.querySelectorAll('.filter-button')) {
        btn.classList.toggle('active', btn.dataset.level === level);
    }

    renderLogs();
}
|
||||
|
||||
// Re-render the console using the current contents of the search box.
function filterLogs() {
    const input = document.getElementById('searchInput');
    searchTerm = input.value;
    renderLogs();
}

// Drop the local log buffer and redraw an empty console.
// (The backend buffer is untouched; the next poll may repopulate it.)
function clearConsole() {
    logs = [];
    renderLogs();
}

// Ask the backend to export the log file; failures are only logged.
async function exportLogs() {
    try {
        await invoke('export_logs');
    } catch (error) {
        console.error('Failed to export logs:', error);
    }
}
|
||||
|
||||
// Flip auto-scroll on/off and reflect the new state in the button label.
function toggleAutoScroll() {
    autoScroll = !autoScroll;
    const label = autoScroll ? 'ON' : 'OFF';
    document.getElementById('autoScrollBtn').textContent = `Auto-scroll: ${label}`;
}

// Start or stop the backend server based on the last known state, then
// refresh the header so the UI reflects the result.
async function toggleServer() {
    const command = isServerRunning ? 'stop_server' : 'start_server';
    try {
        await invoke(command);
        await loadServerStatus();
    } catch (error) {
        console.error('Failed to toggle server:', error);
    }
}
|
||||
|
||||
// Stop the polling timer when the window closes so the interval callback
// does not keep firing against a destroyed webview.
appWindow.onCloseRequested(async () => {
    clearInterval(updateInterval);
});

// Kick off the initial load and the polling loop.
init();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
1286
tauri/src-tauri/public/settings.html
Normal file
1286
tauri/src-tauri/public/settings.html
Normal file
File diff suppressed because it is too large
Load diff
417
tauri/src-tauri/public/welcome.html
Normal file
417
tauri/src-tauri/public/welcome.html
Normal file
|
|
@ -0,0 +1,417 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Welcome to VibeTunnel</title>
|
||||
<style>
|
||||
:root {
|
||||
/* Light mode colors */
|
||||
--bg-color: #ffffff;
|
||||
--window-bg: #f5f5f7;
|
||||
--text-primary: #1d1d1f;
|
||||
--text-secondary: #86868b;
|
||||
--accent-color: #007aff;
|
||||
--accent-hover: #0051d5;
|
||||
--border-color: rgba(0, 0, 0, 0.1);
|
||||
--shadow-color: rgba(0, 0, 0, 0.15);
|
||||
--indicator-inactive: rgba(134, 134, 139, 0.3);
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
:root {
|
||||
/* Dark mode colors */
|
||||
--bg-color: #1c1c1e;
|
||||
--window-bg: #000000;
|
||||
--text-primary: #f5f5f7;
|
||||
--text-secondary: #98989d;
|
||||
--accent-color: #0a84ff;
|
||||
--accent-hover: #409cff;
|
||||
--border-color: rgba(255, 255, 255, 0.1);
|
||||
--shadow-color: rgba(0, 0, 0, 0.5);
|
||||
--indicator-inactive: rgba(152, 152, 157, 0.3);
|
||||
}
|
||||
}
|
||||
|
||||
* {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'SF Pro Display', 'Segoe UI', system-ui, sans-serif;
|
||||
background-color: var(--bg-color);
|
||||
color: var(--text-primary);
|
||||
width: 100vw;
|
||||
height: 100vh;
|
||||
overflow: hidden;
|
||||
-webkit-user-select: none;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.container {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
background-color: var(--window-bg);
|
||||
}
|
||||
|
||||
.content {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
padding: 40px;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.page {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
display: none;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
animation: slideIn 0.3s ease-out;
|
||||
}
|
||||
|
||||
.page.active {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
@keyframes slideIn {
|
||||
from {
|
||||
opacity: 0;
|
||||
transform: translateX(20px);
|
||||
}
|
||||
to {
|
||||
opacity: 1;
|
||||
transform: translateX(0);
|
||||
}
|
||||
}
|
||||
|
||||
.app-icon {
|
||||
width: 156px;
|
||||
height: 156px;
|
||||
margin-bottom: 40px;
|
||||
filter: drop-shadow(0 10px 20px var(--shadow-color));
|
||||
border-radius: 27.6%;
|
||||
}
|
||||
|
||||
.text-content {
|
||||
text-align: center;
|
||||
max-width: 480px;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 40px;
|
||||
font-weight: 600;
|
||||
margin-bottom: 20px;
|
||||
letter-spacing: -0.5px;
|
||||
}
|
||||
|
||||
.subtitle {
|
||||
font-size: 16px;
|
||||
color: var(--text-secondary);
|
||||
line-height: 1.5;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.description {
|
||||
font-size: 16px;
|
||||
color: var(--text-secondary);
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
.navigation {
|
||||
height: 92px;
|
||||
padding: 0 20px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: space-between;
|
||||
border-top: 1px solid var(--border-color);
|
||||
background-color: var(--bg-color);
|
||||
}
|
||||
|
||||
.indicators {
|
||||
height: 32px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 8px;
|
||||
padding-top: 12px;
|
||||
}
|
||||
|
||||
.indicator {
|
||||
width: 8px;
|
||||
height: 8px;
|
||||
border-radius: 50%;
|
||||
background-color: var(--indicator-inactive);
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.2s ease;
|
||||
}
|
||||
|
||||
.indicator:hover {
|
||||
background-color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.indicator.active {
|
||||
background-color: var(--accent-color);
|
||||
}
|
||||
|
||||
.button-container {
|
||||
height: 60px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: flex-end;
|
||||
}
|
||||
|
||||
.next-button {
|
||||
min-width: 80px;
|
||||
padding: 8px 20px;
|
||||
background-color: var(--accent-color);
|
||||
color: white;
|
||||
border: none;
|
||||
border-radius: 6px;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.2s ease;
|
||||
}
|
||||
|
||||
.next-button:hover {
|
||||
background-color: var(--accent-hover);
|
||||
}
|
||||
|
||||
.next-button:active {
|
||||
transform: scale(0.98);
|
||||
}
|
||||
|
||||
/* Additional pages content */
|
||||
.feature-list {
|
||||
margin-top: 30px;
|
||||
text-align: left;
|
||||
max-width: 400px;
|
||||
}
|
||||
|
||||
.feature-item {
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
margin-bottom: 16px;
|
||||
color: var(--text-secondary);
|
||||
font-size: 15px;
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
.feature-icon {
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
margin-right: 12px;
|
||||
flex-shrink: 0;
|
||||
color: var(--accent-color);
|
||||
}
|
||||
|
||||
.code-block {
|
||||
background-color: var(--bg-color);
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 6px;
|
||||
padding: 16px;
|
||||
margin: 20px 0;
|
||||
font-family: 'SF Mono', Monaco, 'Cascadia Code', monospace;
|
||||
font-size: 13px;
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.button-group {
|
||||
display: flex;
|
||||
gap: 12px;
|
||||
margin-top: 20px;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.secondary-button {
|
||||
padding: 8px 20px;
|
||||
background-color: transparent;
|
||||
color: var(--accent-color);
|
||||
border: 1px solid var(--accent-color);
|
||||
border-radius: 6px;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
cursor: pointer;
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
|
||||
.secondary-button:hover {
|
||||
background-color: var(--accent-color);
|
||||
color: white;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<div class="content">
|
||||
<!-- Page 1: Welcome -->
|
||||
<div class="page active" id="page-0">
|
||||
<img src="icon.png" alt="VibeTunnel" class="app-icon">
|
||||
<div class="text-content">
|
||||
<h1>Welcome to VibeTunnel</h1>
|
||||
<p class="subtitle">Turn any browser into your terminal. Command your agents on the go.</p>
|
||||
<p class="description">
|
||||
You'll be quickly guided through the basics of VibeTunnel.<br>
|
||||
This screen can always be opened from the settings.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Page 2: Features -->
|
||||
<div class="page" id="page-1">
|
||||
<img src="icon.png" alt="VibeTunnel" class="app-icon">
|
||||
<div class="text-content">
|
||||
<h1>What VibeTunnel Does</h1>
|
||||
<p class="subtitle">A secure terminal server that runs on your machine</p>
|
||||
<div class="feature-list">
|
||||
<div class="feature-item">
|
||||
<svg class="feature-icon" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z"></path>
|
||||
</svg>
|
||||
<span>Access your terminal from any web browser</span>
|
||||
</div>
|
||||
<div class="feature-item">
|
||||
<svg class="feature-icon" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z"></path>
|
||||
</svg>
|
||||
<span>Create multiple isolated terminal sessions</span>
|
||||
</div>
|
||||
<div class="feature-item">
|
||||
<svg class="feature-icon" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z"></path>
|
||||
</svg>
|
||||
<span>Secure with password protection</span>
|
||||
</div>
|
||||
<div class="feature-item">
|
||||
<svg class="feature-icon" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z"></path>
|
||||
</svg>
|
||||
<span>Works with ngrok or Tailscale for remote access</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Page 3: Dashboard Access -->
|
||||
<div class="page" id="page-2">
|
||||
<img src="icon.png" alt="VibeTunnel" class="app-icon">
|
||||
<div class="text-content">
|
||||
<h1>Accessing Your Dashboard</h1>
|
||||
<p class="subtitle">
|
||||
To access your terminals from any device, create a tunnel from your device.<br><br>
|
||||
This can be done via <strong>ngrok</strong> in settings or <strong>Tailscale</strong> (recommended).
|
||||
</p>
|
||||
<div class="button-group">
|
||||
<button class="secondary-button" onclick="openDashboard()">
|
||||
Open Dashboard
|
||||
</button>
|
||||
<button class="secondary-button" onclick="openTailscale()">
|
||||
Learn about Tailscale
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Page 4: Getting Started -->
|
||||
<div class="page" id="page-3">
|
||||
<img src="icon.png" alt="VibeTunnel" class="app-icon">
|
||||
<div class="text-content">
|
||||
<h1>You're All Set!</h1>
|
||||
<p class="subtitle">VibeTunnel is now running in your system tray</p>
|
||||
<p class="description">
|
||||
Click the VibeTunnel icon in your system tray to access settings,<br>
|
||||
open the dashboard, or manage your terminal sessions.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="navigation">
|
||||
<div class="indicators">
|
||||
<button class="indicator active" onclick="goToPage(0)"></button>
|
||||
<button class="indicator" onclick="goToPage(1)"></button>
|
||||
<button class="indicator" onclick="goToPage(2)"></button>
|
||||
<button class="indicator" onclick="goToPage(3)"></button>
|
||||
</div>
|
||||
<div class="button-container">
|
||||
<button class="next-button" id="nextButton" onclick="handleNext()">Next</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
// Tauri API bindings from the injected global.
// NOTE(review): Tauri v2 exposes invoke under window.__TAURI__.core (v1 used
// .tauri) — confirm against the bundled runtime. `invoke` is also not
// referenced anywhere in this script — confirm intended.
const { invoke } = window.__TAURI__.tauri;
const { open } = window.__TAURI__.shell;
const { appWindow } = window.__TAURI__.window;

// Index of the onboarding page currently shown (0-based).
let currentPage = 0;
// Total page count; must match the page-N divs and indicator buttons in the markup.
const totalPages = 4;
|
||||
|
||||
// Switch the visible onboarding page to pageIndex (0..totalPages-1),
// moving the 'active' class on both the page div and its indicator dot,
// then refresh the Next/Finish button label. Out-of-range indices are ignored.
function goToPage(pageIndex) {
    if (pageIndex < 0 || pageIndex >= totalPages) return;

    const indicators = document.querySelectorAll('.indicator');
    const pageEl = (i) => document.getElementById(`page-${i}`);

    // Deactivate the current page and its dot.
    pageEl(currentPage).classList.remove('active');
    indicators[currentPage].classList.remove('active');

    // Activate the requested page and its dot.
    currentPage = pageIndex;
    pageEl(currentPage).classList.add('active');
    indicators[currentPage].classList.add('active');

    updateNextButton();
}
|
||||
|
||||
// Show 'Finish' on the last onboarding page, 'Next' everywhere else.
function updateNextButton() {
    const onLastPage = currentPage === totalPages - 1;
    document.getElementById('nextButton').textContent = onLastPage ? 'Finish' : 'Next';
}
|
||||
|
||||
// Advance to the next page, or close the welcome window from the last page.
function handleNext() {
    const onLastPage = currentPage >= totalPages - 1;
    if (onLastPage) {
        // Finished onboarding — dismiss the welcome window.
        appWindow.close();
    } else {
        goToPage(currentPage + 1);
    }
}
|
||||
|
||||
// Open an external URL through the Tauri shell; failures are only logged.
async function openUrl(url, what) {
    try {
        await open(url);
    } catch (error) {
        console.error(`Failed to open ${what}:`, error);
    }
}

// Launch the local VibeTunnel dashboard in the default browser.
async function openDashboard() {
    await openUrl('http://127.0.0.1:4020', 'dashboard');
}

// Open the Tailscale homepage.
async function openTailscale() {
    await openUrl('https://tailscale.com/', 'Tailscale');
}
|
||||
|
||||
// Keyboard navigation: Enter advances (or finishes on the last page),
// arrow keys move between pages within bounds.
document.addEventListener('keydown', (e) => {
    switch (e.key) {
        case 'Enter':
            handleNext();
            break;
        case 'ArrowRight':
            if (currentPage < totalPages - 1) goToPage(currentPage + 1);
            break;
        case 'ArrowLeft':
            if (currentPage > 0) goToPage(currentPage - 1);
            break;
    }
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
648
tauri/src-tauri/src/api_testing.rs
Normal file
648
tauri/src-tauri/src/api_testing.rs
Normal file
|
|
@ -0,0 +1,648 @@
|
|||
use serde::{Serialize, Deserialize};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
use std::collections::HashMap;
|
||||
use chrono::{DateTime, Utc};
|
||||
use reqwest::Client;
|
||||
use std::time::Duration;
|
||||
|
||||
/// HTTP request method used by an API test.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
pub enum HttpMethod {
    GET,
    POST,
    PUT,
    PATCH,
    DELETE,
    HEAD,
    OPTIONS,
}
|
||||
|
||||
impl HttpMethod {
|
||||
pub fn as_str(&self) -> &str {
|
||||
match self {
|
||||
HttpMethod::GET => "GET",
|
||||
HttpMethod::POST => "POST",
|
||||
HttpMethod::PUT => "PUT",
|
||||
HttpMethod::PATCH => "PATCH",
|
||||
HttpMethod::DELETE => "DELETE",
|
||||
HttpMethod::HEAD => "HEAD",
|
||||
HttpMethod::OPTIONS => "OPTIONS",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A single check applied to an HTTP response.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum AssertionType {
    /// Status must equal this exact code.
    StatusCode(u16),
    /// Status must fall within `min..=max` (presumably inclusive — confirm in evaluator).
    StatusRange { min: u16, max: u16 },
    /// Round-trip time must not exceed `max_ms` milliseconds.
    ResponseTime { max_ms: u64 },
    /// The named response header must be present.
    HeaderExists(String),
    /// The named response header must have exactly this value.
    HeaderEquals { key: String, value: String },
    /// The value at a JSON path in the body must equal `expected`.
    JsonPath { path: String, expected: serde_json::Value },
    /// The body must contain this substring.
    BodyContains(String),
    /// The body must match this pattern.
    BodyMatches(String), // Regex
    /// The Content-Type header must match this value.
    ContentType(String),
}
|
||||
|
||||
/// A single API test case: one HTTP request plus the assertions
/// evaluated against its response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct APITest {
    /// Stable identifier for this test.
    pub id: String,
    /// Human-readable test name.
    pub name: String,
    pub description: Option<String>,
    /// Optional grouping label (e.g. for organizing tests in the UI).
    pub group: Option<String>,
    /// Target URL (presumably joined with the suite's `base_url` when relative — confirm in runner).
    pub endpoint_url: String,
    pub method: HttpMethod,
    /// Request headers sent with this test.
    pub headers: HashMap<String, String>,
    /// Query-string parameters appended to the URL.
    pub query_params: HashMap<String, String>,
    /// Optional request body payload.
    pub body: Option<APITestBody>,
    /// Per-test authentication, overriding any suite default — TODO confirm precedence.
    pub auth: Option<APITestAuth>,
    /// Checks run against the response.
    pub assertions: Vec<AssertionType>,
    /// Request timeout in milliseconds.
    pub timeout_ms: u64,
    /// Number of retry attempts after a failure — presumably in addition to the first try; confirm.
    pub retry_count: u32,
    /// Optional delay in milliseconds (presumably before issuing the request — confirm).
    pub delay_ms: Option<u64>,
    /// Whether to keep the response body in the test result.
    pub save_response: bool,
}
|
||||
|
||||
/// Request body payload for an API test.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum APITestBody {
    /// JSON payload, serialized from the stored value.
    Json(serde_json::Value),
    /// Form-encoded key/value pairs.
    Form(HashMap<String, String>),
    /// Raw text body.
    Text(String),
    /// Raw binary body.
    Binary(Vec<u8>),
}
|
||||
|
||||
/// Authentication scheme attached to an API test request.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum APITestAuth {
    /// HTTP Basic authentication credentials.
    Basic { username: String, password: String },
    /// Bearer token value.
    Bearer(String),
    /// API key; `in_header` presumably selects header vs. query-parameter
    /// placement — confirm in the request builder.
    ApiKey { key: String, value: String, in_header: bool },
    /// Arbitrary key/value pairs (presumably extra headers — confirm).
    Custom(HashMap<String, String>),
}
|
||||
|
||||
/// Outcome of executing a single API test.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct APITestResult {
    /// Id of the `APITest` this result belongs to.
    pub test_id: String,
    pub test_name: String,
    /// Overall pass/fail for the test.
    pub success: bool,
    /// When the test was executed (UTC).
    pub timestamp: DateTime<Utc>,
    /// Duration of the test in milliseconds.
    pub duration_ms: u64,
    /// HTTP status, when a response was received at all.
    pub status_code: Option<u16>,
    /// Headers from the response.
    pub response_headers: HashMap<String, String>,
    /// Captured response body — presumably only when capture is enabled
    /// (see `APITest::save_response`); confirm in runner.
    pub response_body: Option<String>,
    /// Per-assertion outcomes.
    pub assertion_results: Vec<AssertionResult>,
    /// Transport/setup error message when the request itself failed.
    pub error: Option<String>,
    /// Number of retries consumed before producing this result.
    pub retries_used: u32,
}
|
||||
|
||||
/// Result of evaluating one assertion against a response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AssertionResult {
    /// The assertion that was evaluated.
    pub assertion: AssertionType,
    /// Whether the assertion held.
    pub passed: bool,
    /// Observed value rendered as text, when one could be extracted.
    pub actual_value: Option<String>,
    /// Explanation of the failure, when `passed` is false.
    pub error_message: Option<String>,
}
|
||||
|
||||
/// A named group of API tests sharing defaults (base URL, headers, auth)
/// plus optional setup/teardown phases.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct APITestSuite {
    pub id: String,
    pub name: String,
    pub description: Option<String>,
    /// Prefix presumably joined with each test's `endpoint_url` — confirm in runner.
    pub base_url: Option<String>,
    /// Headers applied to every test in the suite — TODO confirm merge order with per-test headers.
    pub default_headers: HashMap<String, String>,
    /// Auth used when a test does not specify its own — presumably; confirm.
    pub default_auth: Option<APITestAuth>,
    /// The main test cases of the suite.
    pub tests: Vec<APITest>,
    /// Tests run before the main set (presumably fixtures/login — confirm).
    pub setup_tests: Vec<APITest>,
    /// Tests run after the main set (presumably cleanup — confirm).
    pub teardown_tests: Vec<APITest>,
    /// Suite-scoped variables (presumably for substitution in requests — confirm).
    pub variables: HashMap<String, String>,
}
|
||||
|
||||
/// API test collection
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct APITestCollection {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
pub suites: Vec<APITestSuite>,
|
||||
pub global_variables: HashMap<String, String>,
|
||||
}
|
||||
|
||||
/// API test runner configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct APITestRunnerConfig {
|
||||
pub parallel_execution: bool,
|
||||
pub max_parallel_tests: usize,
|
||||
pub stop_on_failure: bool,
|
||||
pub capture_responses: bool,
|
||||
pub follow_redirects: bool,
|
||||
pub verify_ssl: bool,
|
||||
pub proxy: Option<String>,
|
||||
pub environment_variables: HashMap<String, String>,
|
||||
}
|
||||
|
||||
impl Default for APITestRunnerConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
parallel_execution: false,
|
||||
max_parallel_tests: 5,
|
||||
stop_on_failure: false,
|
||||
capture_responses: true,
|
||||
follow_redirects: true,
|
||||
verify_ssl: true,
|
||||
proxy: None,
|
||||
environment_variables: HashMap::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// API test history entry
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct APITestHistoryEntry {
|
||||
pub run_id: String,
|
||||
pub timestamp: DateTime<Utc>,
|
||||
pub suite_name: String,
|
||||
pub total_tests: usize,
|
||||
pub passed_tests: usize,
|
||||
pub failed_tests: usize,
|
||||
pub total_duration_ms: u64,
|
||||
pub results: Vec<APITestResult>,
|
||||
}
|
||||
|
||||
/// API testing manager
|
||||
pub struct APITestingManager {
|
||||
client: Arc<Client>,
|
||||
config: Arc<RwLock<APITestRunnerConfig>>,
|
||||
test_suites: Arc<RwLock<HashMap<String, APITestSuite>>>,
|
||||
test_history: Arc<RwLock<Vec<APITestHistoryEntry>>>,
|
||||
running_tests: Arc<RwLock<HashMap<String, bool>>>,
|
||||
shared_variables: Arc<RwLock<HashMap<String, String>>>,
|
||||
notification_manager: Option<Arc<crate::notification_manager::NotificationManager>>,
|
||||
}
|
||||
|
||||
impl APITestingManager {
|
||||
/// Create a new API testing manager
|
||||
pub fn new() -> Self {
|
||||
let client = Client::builder()
|
||||
.timeout(Duration::from_secs(30))
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
Self {
|
||||
client: Arc::new(client),
|
||||
config: Arc::new(RwLock::new(APITestRunnerConfig::default())),
|
||||
test_suites: Arc::new(RwLock::new(HashMap::new())),
|
||||
test_history: Arc::new(RwLock::new(Vec::new())),
|
||||
running_tests: Arc::new(RwLock::new(HashMap::new())),
|
||||
shared_variables: Arc::new(RwLock::new(HashMap::new())),
|
||||
notification_manager: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Set the notification manager
|
||||
pub fn set_notification_manager(&mut self, notification_manager: Arc<crate::notification_manager::NotificationManager>) {
|
||||
self.notification_manager = Some(notification_manager);
|
||||
}
|
||||
|
||||
/// Get configuration
|
||||
pub async fn get_config(&self) -> APITestRunnerConfig {
|
||||
self.config.read().await.clone()
|
||||
}
|
||||
|
||||
/// Update configuration
|
||||
pub async fn update_config(&self, config: APITestRunnerConfig) {
|
||||
*self.config.write().await = config;
|
||||
}
|
||||
|
||||
/// Add test suite
|
||||
pub async fn add_test_suite(&self, suite: APITestSuite) {
|
||||
self.test_suites.write().await.insert(suite.id.clone(), suite);
|
||||
}
|
||||
|
||||
/// Get test suite
|
||||
pub async fn get_test_suite(&self, suite_id: &str) -> Option<APITestSuite> {
|
||||
self.test_suites.read().await.get(suite_id).cloned()
|
||||
}
|
||||
|
||||
/// List test suites
|
||||
pub async fn list_test_suites(&self) -> Vec<APITestSuite> {
|
||||
self.test_suites.read().await.values().cloned().collect()
|
||||
}
|
||||
|
||||
/// Run single test
|
||||
pub async fn run_test(&self, test: &APITest, variables: &HashMap<String, String>) -> APITestResult {
|
||||
let start_time = std::time::Instant::now();
|
||||
let mut result = APITestResult {
|
||||
test_id: test.id.clone(),
|
||||
test_name: test.name.clone(),
|
||||
success: false,
|
||||
timestamp: Utc::now(),
|
||||
duration_ms: 0,
|
||||
status_code: None,
|
||||
response_headers: HashMap::new(),
|
||||
response_body: None,
|
||||
assertion_results: Vec::new(),
|
||||
error: None,
|
||||
retries_used: 0,
|
||||
};
|
||||
|
||||
// Replace variables in URL
|
||||
let url = self.replace_variables(&test.endpoint_url, variables);
|
||||
|
||||
// Run test with retries
|
||||
let mut last_error = None;
|
||||
for retry in 0..=test.retry_count {
|
||||
if retry > 0 {
|
||||
// Delay between retries
|
||||
if let Some(delay) = test.delay_ms {
|
||||
tokio::time::sleep(Duration::from_millis(delay)).await;
|
||||
}
|
||||
}
|
||||
|
||||
match self.execute_request(&test, &url, variables).await {
|
||||
Ok((status, headers, body)) => {
|
||||
result.status_code = Some(status);
|
||||
result.response_headers = headers;
|
||||
if test.save_response {
|
||||
result.response_body = Some(body.clone());
|
||||
}
|
||||
result.retries_used = retry;
|
||||
|
||||
// Run assertions
|
||||
result.assertion_results = self.run_assertions(&test.assertions, status, &result.response_headers, &body).await;
|
||||
result.success = result.assertion_results.iter().all(|a| a.passed);
|
||||
|
||||
break;
|
||||
}
|
||||
Err(e) => {
|
||||
last_error = Some(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(error) = last_error {
|
||||
result.error = Some(error);
|
||||
}
|
||||
|
||||
result.duration_ms = start_time.elapsed().as_millis() as u64;
|
||||
result
|
||||
}
|
||||
|
||||
/// Run test suite
|
||||
pub async fn run_test_suite(&self, suite_id: &str) -> Option<APITestHistoryEntry> {
|
||||
let suite = self.get_test_suite(suite_id).await?;
|
||||
let run_id = uuid::Uuid::new_v4().to_string();
|
||||
let start_time = std::time::Instant::now();
|
||||
|
||||
// Merge variables
|
||||
let mut variables = self.shared_variables.read().await.clone();
|
||||
variables.extend(suite.variables.clone());
|
||||
|
||||
let mut results = Vec::new();
|
||||
|
||||
// Run setup tests
|
||||
for test in &suite.setup_tests {
|
||||
let result = self.run_test(test, &variables).await;
|
||||
if !result.success && self.config.read().await.stop_on_failure {
|
||||
break;
|
||||
}
|
||||
results.push(result);
|
||||
}
|
||||
|
||||
// Run main tests
|
||||
let config = self.config.read().await;
|
||||
if config.parallel_execution {
|
||||
// Run tests in parallel
|
||||
let mut tasks = Vec::new();
|
||||
for test in &suite.tests {
|
||||
let test = test.clone();
|
||||
let vars = variables.clone();
|
||||
let manager = self.clone_for_parallel();
|
||||
|
||||
tasks.push(tokio::spawn(async move {
|
||||
manager.run_test(&test, &vars).await
|
||||
}));
|
||||
}
|
||||
|
||||
for task in tasks {
|
||||
if let Ok(result) = task.await {
|
||||
results.push(result);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Run tests sequentially
|
||||
for test in &suite.tests {
|
||||
let result = self.run_test(test, &variables).await;
|
||||
if !result.success && config.stop_on_failure {
|
||||
break;
|
||||
}
|
||||
results.push(result);
|
||||
}
|
||||
}
|
||||
|
||||
// Run teardown tests
|
||||
for test in &suite.teardown_tests {
|
||||
let result = self.run_test(test, &variables).await;
|
||||
results.push(result);
|
||||
}
|
||||
|
||||
let total_duration = start_time.elapsed().as_millis() as u64;
|
||||
let passed = results.iter().filter(|r| r.success).count();
|
||||
let failed = results.len() - passed;
|
||||
|
||||
let history_entry = APITestHistoryEntry {
|
||||
run_id,
|
||||
timestamp: Utc::now(),
|
||||
suite_name: suite.name,
|
||||
total_tests: results.len(),
|
||||
passed_tests: passed,
|
||||
failed_tests: failed,
|
||||
total_duration_ms: total_duration,
|
||||
results,
|
||||
};
|
||||
|
||||
// Store in history
|
||||
self.test_history.write().await.push(history_entry.clone());
|
||||
|
||||
// Send notification
|
||||
if let Some(notification_manager) = &self.notification_manager {
|
||||
let message = format!(
|
||||
"Test suite completed: {} passed, {} failed",
|
||||
passed, failed
|
||||
);
|
||||
let _ = notification_manager.notify_success("API Tests", &message).await;
|
||||
}
|
||||
|
||||
Some(history_entry)
|
||||
}
|
||||
|
||||
/// Get test history
|
||||
pub async fn get_test_history(&self, limit: Option<usize>) -> Vec<APITestHistoryEntry> {
|
||||
let history = self.test_history.read().await;
|
||||
match limit {
|
||||
Some(n) => history.iter().rev().take(n).cloned().collect(),
|
||||
None => history.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Clear test history
|
||||
pub async fn clear_test_history(&self) {
|
||||
self.test_history.write().await.clear();
|
||||
}
|
||||
|
||||
/// Import Postman collection
|
||||
pub async fn import_postman_collection(&self, _json_data: &str) -> Result<String, String> {
|
||||
// TODO: Implement Postman collection import
|
||||
Err("Postman import not yet implemented".to_string())
|
||||
}
|
||||
|
||||
/// Export test suite
|
||||
pub async fn export_test_suite(&self, suite_id: &str) -> Result<String, String> {
|
||||
let suite = self.get_test_suite(suite_id).await
|
||||
.ok_or_else(|| "Test suite not found".to_string())?;
|
||||
|
||||
serde_json::to_string_pretty(&suite)
|
||||
.map_err(|e| format!("Failed to serialize test suite: {}", e))
|
||||
}
|
||||
|
||||
// Helper methods
|
||||
async fn execute_request(
|
||||
&self,
|
||||
test: &APITest,
|
||||
url: &str,
|
||||
variables: &HashMap<String, String>,
|
||||
) -> Result<(u16, HashMap<String, String>, String), String> {
|
||||
let config = self.config.read().await;
|
||||
let client = Client::builder()
|
||||
.timeout(Duration::from_millis(test.timeout_ms))
|
||||
.redirect(if config.follow_redirects {
|
||||
reqwest::redirect::Policy::default()
|
||||
} else {
|
||||
reqwest::redirect::Policy::none()
|
||||
})
|
||||
.danger_accept_invalid_certs(!config.verify_ssl)
|
||||
.build()
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
let mut request = match test.method {
|
||||
HttpMethod::GET => client.get(url),
|
||||
HttpMethod::POST => client.post(url),
|
||||
HttpMethod::PUT => client.put(url),
|
||||
HttpMethod::PATCH => client.patch(url),
|
||||
HttpMethod::DELETE => client.delete(url),
|
||||
HttpMethod::HEAD => client.head(url),
|
||||
HttpMethod::OPTIONS => client.request(reqwest::Method::OPTIONS, url),
|
||||
};
|
||||
|
||||
// Add headers
|
||||
for (key, value) in &test.headers {
|
||||
let value = self.replace_variables(value, variables);
|
||||
request = request.header(key, value);
|
||||
}
|
||||
|
||||
// Add query params
|
||||
for (key, value) in &test.query_params {
|
||||
let value = self.replace_variables(value, variables);
|
||||
request = request.query(&[(key, value)]);
|
||||
}
|
||||
|
||||
// Add auth
|
||||
if let Some(auth) = &test.auth {
|
||||
request = self.apply_auth(request, auth, variables);
|
||||
}
|
||||
|
||||
// Add body
|
||||
if let Some(body) = &test.body {
|
||||
request = match body {
|
||||
APITestBody::Json(json) => request.json(json),
|
||||
APITestBody::Form(form) => request.form(form),
|
||||
APITestBody::Text(text) => request.body(text.clone()),
|
||||
APITestBody::Binary(bytes) => request.body(bytes.clone()),
|
||||
};
|
||||
}
|
||||
|
||||
// Execute request
|
||||
let response = request.send().await.map_err(|e| e.to_string())?;
|
||||
let status = response.status().as_u16();
|
||||
|
||||
let mut headers = HashMap::new();
|
||||
for (key, value) in response.headers() {
|
||||
if let Ok(value_str) = value.to_str() {
|
||||
headers.insert(key.to_string(), value_str.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
let body = response.text().await.unwrap_or_default();
|
||||
|
||||
Ok((status, headers, body))
|
||||
}
|
||||
|
||||
async fn run_assertions(
|
||||
&self,
|
||||
assertions: &[AssertionType],
|
||||
status: u16,
|
||||
headers: &HashMap<String, String>,
|
||||
body: &str,
|
||||
) -> Vec<AssertionResult> {
|
||||
let mut results = Vec::new();
|
||||
|
||||
for assertion in assertions {
|
||||
let result = match assertion {
|
||||
AssertionType::StatusCode(expected) => {
|
||||
AssertionResult {
|
||||
assertion: assertion.clone(),
|
||||
passed: status == *expected,
|
||||
actual_value: Some(status.to_string()),
|
||||
error_message: if status != *expected {
|
||||
Some(format!("Expected status {}, got {}", expected, status))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
}
|
||||
}
|
||||
AssertionType::StatusRange { min, max } => {
|
||||
AssertionResult {
|
||||
assertion: assertion.clone(),
|
||||
passed: status >= *min && status <= *max,
|
||||
actual_value: Some(status.to_string()),
|
||||
error_message: if status < *min || status > *max {
|
||||
Some(format!("Expected status between {} and {}, got {}", min, max, status))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
}
|
||||
}
|
||||
AssertionType::HeaderExists(key) => {
|
||||
AssertionResult {
|
||||
assertion: assertion.clone(),
|
||||
passed: headers.contains_key(key),
|
||||
actual_value: None,
|
||||
error_message: if !headers.contains_key(key) {
|
||||
Some(format!("Header '{}' not found", key))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
}
|
||||
}
|
||||
AssertionType::HeaderEquals { key, value } => {
|
||||
let actual = headers.get(key);
|
||||
AssertionResult {
|
||||
assertion: assertion.clone(),
|
||||
passed: actual == Some(value),
|
||||
actual_value: actual.cloned(),
|
||||
error_message: if actual != Some(value) {
|
||||
Some(format!("Header '{}' expected '{}', got '{:?}'", key, value, actual))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
}
|
||||
}
|
||||
AssertionType::BodyContains(text) => {
|
||||
AssertionResult {
|
||||
assertion: assertion.clone(),
|
||||
passed: body.contains(text),
|
||||
actual_value: None,
|
||||
error_message: if !body.contains(text) {
|
||||
Some(format!("Body does not contain '{}'", text))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
}
|
||||
}
|
||||
AssertionType::JsonPath { path: _, expected: _ } => {
|
||||
// TODO: Implement JSON path assertion
|
||||
AssertionResult {
|
||||
assertion: assertion.clone(),
|
||||
passed: false,
|
||||
actual_value: None,
|
||||
error_message: Some("JSON path assertions not yet implemented".to_string()),
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
AssertionResult {
|
||||
assertion: assertion.clone(),
|
||||
passed: false,
|
||||
actual_value: None,
|
||||
error_message: Some("Assertion type not implemented".to_string()),
|
||||
}
|
||||
}
|
||||
};
|
||||
results.push(result);
|
||||
}
|
||||
|
||||
results
|
||||
}
|
||||
|
||||
fn replace_variables(&self, text: &str, variables: &HashMap<String, String>) -> String {
|
||||
let mut result = text.to_string();
|
||||
for (key, value) in variables {
|
||||
result = result.replace(&format!("{{{{{}}}}}", key), value);
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
fn apply_auth(
|
||||
&self,
|
||||
request: reqwest::RequestBuilder,
|
||||
auth: &APITestAuth,
|
||||
variables: &HashMap<String, String>,
|
||||
) -> reqwest::RequestBuilder {
|
||||
match auth {
|
||||
APITestAuth::Basic { username, password } => {
|
||||
let username = self.replace_variables(username, variables);
|
||||
let password = self.replace_variables(password, variables);
|
||||
request.basic_auth(username, Some(password))
|
||||
}
|
||||
APITestAuth::Bearer(token) => {
|
||||
let token = self.replace_variables(token, variables);
|
||||
request.bearer_auth(token)
|
||||
}
|
||||
APITestAuth::ApiKey { key, value, in_header } => {
|
||||
let key = self.replace_variables(key, variables);
|
||||
let value = self.replace_variables(value, variables);
|
||||
if *in_header {
|
||||
request.header(key, value)
|
||||
} else {
|
||||
request.query(&[(key, value)])
|
||||
}
|
||||
}
|
||||
APITestAuth::Custom(headers) => {
|
||||
let mut req = request;
|
||||
for (key, value) in headers {
|
||||
let value = self.replace_variables(value, variables);
|
||||
req = req.header(key, value);
|
||||
}
|
||||
req
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn clone_for_parallel(&self) -> Self {
|
||||
Self {
|
||||
client: self.client.clone(),
|
||||
config: self.config.clone(),
|
||||
test_suites: self.test_suites.clone(),
|
||||
test_history: self.test_history.clone(),
|
||||
running_tests: self.running_tests.clone(),
|
||||
shared_variables: self.shared_variables.clone(),
|
||||
notification_manager: self.notification_manager.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// API test statistics
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct APITestStatistics {
|
||||
pub total_suites: usize,
|
||||
pub total_tests: usize,
|
||||
pub total_runs: usize,
|
||||
pub success_rate: f64,
|
||||
pub average_duration_ms: f64,
|
||||
pub most_failed_tests: Vec<(String, usize)>,
|
||||
pub slowest_tests: Vec<(String, u64)>,
|
||||
}
|
||||
172
tauri/src-tauri/src/app_mover.rs
Normal file
172
tauri/src-tauri/src/app_mover.rs
Normal file
|
|
@ -0,0 +1,172 @@
|
|||
use std::path::{Path, PathBuf};
use tauri::{AppHandle, Manager};
|
||||
|
||||
/// Check if the app should be moved to the Applications folder and, if the
/// user agrees, move it there and relaunch from the new location.
/// This is a macOS-specific feature.
#[cfg(target_os = "macos")]
pub async fn check_and_prompt_move(app_handle: AppHandle) -> Result<(), String> {
    // Get current app bundle path.
    let bundle_path = get_app_bundle_path()?;

    // Nothing to do when already running from an Applications folder.
    if is_in_applications_folder(&bundle_path) {
        return Ok(());
    }

    // NOTE(review): this reuses `show_welcome_on_startup` as the
    // "already asked about moving" marker — confirm that is intentional and
    // not a copy/paste from the welcome-screen logic.
    let settings = crate::settings::Settings::load().unwrap_or_default();
    if let Some(asked) = settings.general.show_welcome_on_startup {
        if !asked {
            // User has already been asked, don't ask again.
            return Ok(());
        }
    }

    // BUG FIX: the main window lookup previously used `.unwrap()`, which
    // would panic if the window was closed; report an error instead.
    let window = app_handle
        .get_webview_window("main")
        .ok_or_else(|| "main window not found".to_string())?;

    // Show dialog asking if user wants to move to Applications.
    let response = tauri::api::dialog::blocking::ask(
        Some(&window),
        "Move to Applications Folder?",
        "VibeTunnel works best when run from the Applications folder. Would you like to move it there?"
    );

    if response {
        move_to_applications_folder(bundle_path)?;

        // Restart the app from the new location (does not return on success).
        restart_from_applications()?;
    }

    // Update settings to not ask again.
    let mut settings = crate::settings::Settings::load().unwrap_or_default();
    settings.general.show_welcome_on_startup = Some(false);
    settings.save().ok();

    Ok(())
}
|
||||
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
pub async fn check_and_prompt_move(_app_handle: AppHandle) -> Result<(), String> {
|
||||
// Not applicable on other platforms
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
fn get_app_bundle_path() -> Result<PathBuf, String> {
|
||||
use std::env;
|
||||
|
||||
// Get the executable path
|
||||
let exe_path = env::current_exe()
|
||||
.map_err(|e| format!("Failed to get executable path: {}", e))?;
|
||||
|
||||
// Navigate up to the .app bundle
|
||||
// Typical structure: /path/to/VibeTunnel.app/Contents/MacOS/VibeTunnel
|
||||
let mut bundle_path = exe_path;
|
||||
|
||||
// Go up three levels to reach the .app bundle
|
||||
for _ in 0..3 {
|
||||
bundle_path = bundle_path.parent()
|
||||
.ok_or("Failed to find app bundle")?
|
||||
.to_path_buf();
|
||||
}
|
||||
|
||||
// Verify this is an .app bundle
|
||||
if !bundle_path.to_string_lossy().ends_with(".app") {
|
||||
return Err("Not running from an app bundle".to_string());
|
||||
}
|
||||
|
||||
Ok(bundle_path)
|
||||
}
|
||||
|
||||
/// Whether `bundle_path` lives under an Applications directory.
///
/// NOTE(review): this private helper shares its name with the
/// `#[tauri::command] is_in_applications_folder` below — two same-named
/// functions in one module are a duplicate-definition compile error; one of
/// the two should be renamed (most likely the helper).
#[cfg(target_os = "macos")]
fn is_in_applications_folder(bundle_path: &Path) -> bool {
    // `&Path` instead of `&PathBuf`: callers pass `&bundle_path`, which
    // deref-coerces, so this is backward-compatible and more idiomatic.
    let path_str = bundle_path.to_string_lossy();
    path_str.contains("/Applications/") || path_str.contains("/System/Applications/")
}
|
||||
|
||||
/// Move the app bundle into /Applications, prompting before replacing an
/// existing install. The move itself is delegated to Finder via AppleScript
/// so it behaves like a user-initiated move (permissions, prompts).
#[cfg(target_os = "macos")]
fn move_to_applications_folder(bundle_path: PathBuf) -> Result<(), String> {
    use std::fs;
    use std::process::Command;

    let app_name = bundle_path
        .file_name()
        .ok_or("Failed to get app name")?
        .to_string_lossy();

    let dest_path = PathBuf::from("/Applications").join(&app_name);

    // Confirm before clobbering an existing install at the destination.
    if dest_path.exists() {
        let response = tauri::api::dialog::blocking::ask(
            None,
            "Replace Existing App?",
            "VibeTunnel already exists in the Applications folder. Do you want to replace it?"
        );

        if !response {
            return Err("User cancelled move operation".to_string());
        }

        fs::remove_dir_all(&dest_path)
            .map_err(|e| format!("Failed to remove existing app: {}", e))?;
    }

    // Use AppleScript to move the app with proper permissions.
    let script = format!(
        r#"tell application "Finder"
    move (POSIX file "{}") to (POSIX file "/Applications/") with replacing
end tell"#,
        bundle_path.to_string_lossy()
    );

    let output = Command::new("osascript")
        .arg("-e")
        .arg(script)
        .output()
        .map_err(|e| format!("Failed to execute move command: {}", e))?;

    if !output.status.success() {
        let error = String::from_utf8_lossy(&output.stderr);
        return Err(format!("Failed to move app: {}", error));
    }

    Ok(())
}
|
||||
|
||||
/// Relaunch VibeTunnel from /Applications and terminate this instance.
///
/// On success this function never returns — the current process exits after
/// spawning the new instance.
#[cfg(target_os = "macos")]
fn restart_from_applications() -> Result<(), String> {
    use std::process::Command;

    // NOTE(review): the bundle name is hard-coded; if the app is renamed
    // this path must follow.
    // BUG FIX: the spawned Child was bound to an unused `output` variable
    // (compiler warning); the handle is intentionally discarded — the child
    // outlives this process.
    Command::new("open")
        .arg("-n")
        .arg("/Applications/VibeTunnel.app")
        .spawn()
        .map_err(|e| format!("Failed to restart app: {}", e))?;

    // Exit the current instance; the freshly launched copy keeps running.
    std::process::exit(0);
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn prompt_move_to_applications(app_handle: AppHandle) -> Result<(), String> {
|
||||
check_and_prompt_move(app_handle).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn is_in_applications_folder() -> Result<bool, String> {
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let bundle_path = get_app_bundle_path()?;
|
||||
Ok(is_in_applications_folder(&bundle_path))
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
{
|
||||
// Always return true on non-macOS platforms
|
||||
Ok(true)
|
||||
}
|
||||
}
|
||||
483
tauri/src-tauri/src/auth_cache.rs
Normal file
483
tauri/src-tauri/src/auth_cache.rs
Normal file
|
|
@ -0,0 +1,483 @@
|
|||
use serde::{Serialize, Deserialize};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
use std::collections::HashMap;
|
||||
use chrono::{DateTime, Utc, Duration};
|
||||
use sha2::{Sha256, Digest};
|
||||
|
||||
/// Authentication token type
|
||||
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
|
||||
pub enum TokenType {
|
||||
Bearer,
|
||||
Basic,
|
||||
ApiKey,
|
||||
OAuth2,
|
||||
JWT,
|
||||
Custom,
|
||||
}
|
||||
|
||||
/// Authentication scope
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)]
|
||||
pub struct AuthScope {
|
||||
pub service: String,
|
||||
pub resource: Option<String>,
|
||||
pub permissions: Vec<String>,
|
||||
}
|
||||
|
||||
/// Cached authentication token
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct CachedToken {
|
||||
pub token_type: TokenType,
|
||||
pub token_value: String,
|
||||
pub scope: AuthScope,
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub expires_at: Option<DateTime<Utc>>,
|
||||
pub refresh_token: Option<String>,
|
||||
pub metadata: HashMap<String, serde_json::Value>,
|
||||
}
|
||||
|
||||
impl CachedToken {
|
||||
/// Check if token is expired
|
||||
pub fn is_expired(&self) -> bool {
|
||||
if let Some(expires_at) = self.expires_at {
|
||||
Utc::now() >= expires_at
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if token needs refresh (expires within threshold)
|
||||
pub fn needs_refresh(&self, threshold_seconds: i64) -> bool {
|
||||
if let Some(expires_at) = self.expires_at {
|
||||
let refresh_time = expires_at - Duration::seconds(threshold_seconds);
|
||||
Utc::now() >= refresh_time
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Get remaining lifetime in seconds
|
||||
pub fn remaining_lifetime_seconds(&self) -> Option<i64> {
|
||||
self.expires_at.map(|expires_at| {
|
||||
let duration = expires_at - Utc::now();
|
||||
duration.num_seconds().max(0)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Authentication credential
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AuthCredential {
|
||||
pub credential_type: String,
|
||||
pub username: Option<String>,
|
||||
pub password_hash: Option<String>, // Store hashed password
|
||||
pub api_key: Option<String>,
|
||||
pub client_id: Option<String>,
|
||||
pub client_secret: Option<String>,
|
||||
pub metadata: HashMap<String, String>,
|
||||
}
|
||||
|
||||
/// Authentication cache entry
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AuthCacheEntry {
|
||||
pub key: String,
|
||||
pub tokens: Vec<CachedToken>,
|
||||
pub credential: Option<AuthCredential>,
|
||||
pub last_accessed: DateTime<Utc>,
|
||||
pub access_count: u64,
|
||||
}
|
||||
|
||||
/// Authentication cache configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AuthCacheConfig {
|
||||
pub enabled: bool,
|
||||
pub max_entries: usize,
|
||||
pub default_ttl_seconds: u64,
|
||||
pub refresh_threshold_seconds: i64,
|
||||
pub persist_to_disk: bool,
|
||||
pub encryption_enabled: bool,
|
||||
pub cleanup_interval_seconds: u64,
|
||||
}
|
||||
|
||||
impl Default for AuthCacheConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
enabled: true,
|
||||
max_entries: 1000,
|
||||
default_ttl_seconds: 3600, // 1 hour
|
||||
refresh_threshold_seconds: 300, // 5 minutes
|
||||
persist_to_disk: false,
|
||||
encryption_enabled: true,
|
||||
cleanup_interval_seconds: 600, // 10 minutes
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Authentication cache statistics
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AuthCacheStats {
|
||||
pub total_entries: usize,
|
||||
pub total_tokens: usize,
|
||||
pub expired_tokens: usize,
|
||||
pub cache_hits: u64,
|
||||
pub cache_misses: u64,
|
||||
pub refresh_count: u64,
|
||||
pub eviction_count: u64,
|
||||
}
|
||||
|
||||
/// Token refresh callback
|
||||
pub type TokenRefreshCallback = Arc<dyn Fn(CachedToken) -> futures::future::BoxFuture<'static, Result<CachedToken, String>> + Send + Sync>;
|
||||
|
||||
/// Authentication cache manager
|
||||
pub struct AuthCacheManager {
|
||||
config: Arc<RwLock<AuthCacheConfig>>,
|
||||
cache: Arc<RwLock<HashMap<String, AuthCacheEntry>>>,
|
||||
stats: Arc<RwLock<AuthCacheStats>>,
|
||||
refresh_callbacks: Arc<RwLock<HashMap<String, TokenRefreshCallback>>>,
|
||||
cleanup_handle: Arc<RwLock<Option<tokio::task::JoinHandle<()>>>>,
|
||||
notification_manager: Option<Arc<crate::notification_manager::NotificationManager>>,
|
||||
}
|
||||
|
||||
impl AuthCacheManager {
|
||||
/// Create a new authentication cache manager
|
||||
pub fn new() -> Self {
|
||||
let manager = Self {
|
||||
config: Arc::new(RwLock::new(AuthCacheConfig::default())),
|
||||
cache: Arc::new(RwLock::new(HashMap::new())),
|
||||
stats: Arc::new(RwLock::new(AuthCacheStats {
|
||||
total_entries: 0,
|
||||
total_tokens: 0,
|
||||
expired_tokens: 0,
|
||||
cache_hits: 0,
|
||||
cache_misses: 0,
|
||||
refresh_count: 0,
|
||||
eviction_count: 0,
|
||||
})),
|
||||
refresh_callbacks: Arc::new(RwLock::new(HashMap::new())),
|
||||
cleanup_handle: Arc::new(RwLock::new(None)),
|
||||
notification_manager: None,
|
||||
};
|
||||
|
||||
// Start cleanup task
|
||||
let cleanup_manager = manager.clone_for_cleanup();
|
||||
tokio::spawn(async move {
|
||||
cleanup_manager.start_cleanup_task().await;
|
||||
});
|
||||
|
||||
manager
|
||||
}
|
||||
|
||||
/// Set the notification manager
|
||||
pub fn set_notification_manager(&mut self, notification_manager: Arc<crate::notification_manager::NotificationManager>) {
|
||||
self.notification_manager = Some(notification_manager);
|
||||
}
|
||||
|
||||
/// Get configuration
|
||||
pub async fn get_config(&self) -> AuthCacheConfig {
|
||||
self.config.read().await.clone()
|
||||
}
|
||||
|
||||
/// Update configuration
|
||||
pub async fn update_config(&self, config: AuthCacheConfig) {
|
||||
*self.config.write().await = config;
|
||||
}
|
||||
|
||||
/// Store token in cache
|
||||
pub async fn store_token(&self, key: &str, token: CachedToken) -> Result<(), String> {
|
||||
let config = self.config.read().await;
|
||||
if !config.enabled {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut cache = self.cache.write().await;
|
||||
let mut stats = self.stats.write().await;
|
||||
|
||||
// Get or create cache entry
|
||||
let entry = cache.entry(key.to_string()).or_insert_with(|| {
|
||||
stats.total_entries += 1;
|
||||
AuthCacheEntry {
|
||||
key: key.to_string(),
|
||||
tokens: Vec::new(),
|
||||
credential: None,
|
||||
last_accessed: Utc::now(),
|
||||
access_count: 0,
|
||||
}
|
||||
});
|
||||
|
||||
// Remove expired tokens
|
||||
let expired_count = entry.tokens.iter().filter(|t| t.is_expired()).count();
|
||||
stats.expired_tokens += expired_count;
|
||||
entry.tokens.retain(|t| !t.is_expired());
|
||||
|
||||
// Add new token
|
||||
entry.tokens.push(token);
|
||||
stats.total_tokens += 1;
|
||||
entry.last_accessed = Utc::now();
|
||||
|
||||
// Check cache size limit
|
||||
if cache.len() > config.max_entries {
|
||||
self.evict_oldest_entry(&mut cache, &mut stats);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get token from cache
|
||||
pub async fn get_token(&self, key: &str, scope: &AuthScope) -> Option<CachedToken> {
|
||||
let config = self.config.read().await;
|
||||
if !config.enabled {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut cache = self.cache.write().await;
|
||||
let mut stats = self.stats.write().await;
|
||||
|
||||
if let Some(entry) = cache.get_mut(key) {
|
||||
entry.last_accessed = Utc::now();
|
||||
entry.access_count += 1;
|
||||
|
||||
// Find matching token
|
||||
for token in &entry.tokens {
|
||||
if !token.is_expired() && self.token_matches_scope(token, scope) {
|
||||
stats.cache_hits += 1;
|
||||
|
||||
// Check if needs refresh
|
||||
if token.needs_refresh(config.refresh_threshold_seconds) {
|
||||
// Trigger refresh in background
|
||||
if let Some(refresh_callback) = self.refresh_callbacks.read().await.get(key) {
|
||||
let token_clone = token.clone();
|
||||
let callback = refresh_callback.clone();
|
||||
let key_clone = key.to_string();
|
||||
let manager = self.clone_for_refresh();
|
||||
|
||||
tokio::spawn(async move {
|
||||
if let Ok(refreshed_token) = callback(token_clone).await {
|
||||
let _ = manager.store_token(&key_clone, refreshed_token).await;
|
||||
manager.stats.write().await.refresh_count += 1;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return Some(token.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stats.cache_misses += 1;
|
||||
None
|
||||
}
|
||||
|
||||
    /// Insert or replace the credential stored under `key`.
    ///
    /// Returns `Ok(())` without doing anything when caching is disabled.
    /// A cache entry is created on first use (counted in
    /// `stats.total_entries`); an existing credential under the same key
    /// is overwritten.
    pub async fn store_credential(&self, key: &str, credential: AuthCredential) -> Result<(), String> {
        let config = self.config.read().await;
        if !config.enabled {
            return Ok(());
        }

        // Lock order: cache before stats, matching the rest of the impl.
        let mut cache = self.cache.write().await;
        let mut stats = self.stats.write().await;

        // The closure only runs on first insert, so the entry counter is
        // bumped exactly once per distinct key.
        let entry = cache.entry(key.to_string()).or_insert_with(|| {
            stats.total_entries += 1;
            AuthCacheEntry {
                key: key.to_string(),
                tokens: Vec::new(),
                credential: None,
                last_accessed: Utc::now(),
                access_count: 0,
            }
        });

        entry.credential = Some(credential);
        entry.last_accessed = Utc::now();

        Ok(())
    }
|
||||
|
||||
/// Get credential from cache
|
||||
pub async fn get_credential(&self, key: &str) -> Option<AuthCredential> {
|
||||
let config = self.config.read().await;
|
||||
if !config.enabled {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut cache = self.cache.write().await;
|
||||
|
||||
if let Some(entry) = cache.get_mut(key) {
|
||||
entry.last_accessed = Utc::now();
|
||||
entry.access_count += 1;
|
||||
return entry.credential.clone();
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
    /// Register (or replace) the refresh callback that `get_token` uses
    /// to renew near-expiry tokens stored under `key`.
    pub async fn register_refresh_callback(&self, key: &str, callback: TokenRefreshCallback) {
        self.refresh_callbacks.write().await.insert(key.to_string(), callback);
    }
|
||||
|
||||
/// Clear specific cache entry
|
||||
pub async fn clear_entry(&self, key: &str) {
|
||||
let mut cache = self.cache.write().await;
|
||||
if cache.remove(key).is_some() {
|
||||
self.stats.write().await.total_entries = cache.len();
|
||||
}
|
||||
}
|
||||
|
||||
    /// Drop every cache entry and zero the entry/token counters.
    ///
    /// Cumulative counters (hits, misses, evictions, refreshes) are
    /// deliberately left untouched. The cache write lock is held while
    /// stats are reset so a concurrent store cannot interleave.
    pub async fn clear_all(&self) {
        let mut cache = self.cache.write().await;
        cache.clear();

        let mut stats = self.stats.write().await;
        stats.total_entries = 0;
        stats.total_tokens = 0;
        stats.expired_tokens = 0;
    }
|
||||
|
||||
/// Get cache statistics
|
||||
pub async fn get_stats(&self) -> AuthCacheStats {
|
||||
self.stats.read().await.clone()
|
||||
}
|
||||
|
||||
/// List all cache entries
|
||||
pub async fn list_entries(&self) -> Vec<(String, DateTime<Utc>, u64)> {
|
||||
self.cache.read().await
|
||||
.values()
|
||||
.map(|entry| (entry.key.clone(), entry.last_accessed, entry.access_count))
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Export cache to JSON (for persistence)
|
||||
pub async fn export_cache(&self) -> Result<String, String> {
|
||||
let cache = self.cache.read().await;
|
||||
let entries: Vec<_> = cache.values().cloned().collect();
|
||||
|
||||
serde_json::to_string_pretty(&entries)
|
||||
.map_err(|e| format!("Failed to serialize cache: {}", e))
|
||||
}
|
||||
|
||||
/// Import cache from JSON
|
||||
pub async fn import_cache(&self, json_data: &str) -> Result<(), String> {
|
||||
let entries: Vec<AuthCacheEntry> = serde_json::from_str(json_data)
|
||||
.map_err(|e| format!("Failed to deserialize cache: {}", e))?;
|
||||
|
||||
let mut cache = self.cache.write().await;
|
||||
let mut stats = self.stats.write().await;
|
||||
|
||||
for entry in entries {
|
||||
cache.insert(entry.key.clone(), entry);
|
||||
}
|
||||
|
||||
stats.total_entries = cache.len();
|
||||
stats.total_tokens = cache.values()
|
||||
.map(|e| e.tokens.len())
|
||||
.sum();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
    /// Hash a password with a single round of SHA-256, returned hex-encoded.
    ///
    /// NOTE(review): an unsalted, single-round SHA-256 is not suitable for
    /// at-rest password storage (rainbow-table / brute-force attacks). If
    /// these hashes are persisted, a salted memory-hard KDF (argon2, scrypt,
    /// bcrypt) should be used instead. Changing the algorithm changes the
    /// stored format, so this is flagged here rather than silently replaced.
    pub fn hash_password(password: &str) -> String {
        let mut hasher = Sha256::new();
        hasher.update(password.as_bytes());
        format!("{:x}", hasher.finalize())
    }
|
||||
|
||||
// Helper methods
|
||||
fn token_matches_scope(&self, token: &CachedToken, scope: &AuthScope) -> bool {
|
||||
token.scope.service == scope.service &&
|
||||
token.scope.resource == scope.resource &&
|
||||
scope.permissions.iter().all(|p| token.scope.permissions.contains(p))
|
||||
}
|
||||
|
||||
fn evict_oldest_entry(&self, cache: &mut HashMap<String, AuthCacheEntry>, stats: &mut AuthCacheStats) {
|
||||
if let Some((key, _)) = cache.iter()
|
||||
.min_by_key(|(_, entry)| entry.last_accessed) {
|
||||
let key = key.clone();
|
||||
cache.remove(&key);
|
||||
stats.eviction_count += 1;
|
||||
stats.total_entries = cache.len();
|
||||
}
|
||||
}
|
||||
|
||||
async fn start_cleanup_task(&self) {
|
||||
let config = self.config.read().await;
|
||||
let cleanup_interval = Duration::seconds(config.cleanup_interval_seconds as i64);
|
||||
drop(config);
|
||||
|
||||
loop {
|
||||
tokio::time::sleep(cleanup_interval.to_std().unwrap()).await;
|
||||
|
||||
let config = self.config.read().await;
|
||||
if !config.enabled {
|
||||
continue;
|
||||
}
|
||||
drop(config);
|
||||
|
||||
// Clean up expired tokens
|
||||
let mut cache = self.cache.write().await;
|
||||
let mut stats = self.stats.write().await;
|
||||
let mut total_expired = 0;
|
||||
|
||||
for entry in cache.values_mut() {
|
||||
let expired_count = entry.tokens.iter().filter(|t| t.is_expired()).count();
|
||||
total_expired += expired_count;
|
||||
entry.tokens.retain(|t| !t.is_expired());
|
||||
}
|
||||
|
||||
stats.expired_tokens += total_expired;
|
||||
stats.total_tokens = cache.values()
|
||||
.map(|e| e.tokens.len())
|
||||
.sum();
|
||||
|
||||
// Remove empty entries
|
||||
cache.retain(|_, entry| !entry.tokens.is_empty() || entry.credential.is_some());
|
||||
stats.total_entries = cache.len();
|
||||
}
|
||||
}
|
||||
|
||||
    /// Build a shallow copy that shares all internal `Arc` state, for use
    /// by the background cleanup task.
    ///
    /// NOTE(review): identical to `clone_for_refresh`; deriving or
    /// implementing `Clone` once would remove the duplication.
    fn clone_for_cleanup(&self) -> Self {
        Self {
            config: self.config.clone(),
            cache: self.cache.clone(),
            stats: self.stats.clone(),
            refresh_callbacks: self.refresh_callbacks.clone(),
            cleanup_handle: self.cleanup_handle.clone(),
            notification_manager: self.notification_manager.clone(),
        }
    }
|
||||
|
||||
    /// Build a shallow copy that shares all internal `Arc` state, handed to
    /// the background token-refresh task spawned by `get_token`.
    ///
    /// NOTE(review): identical to `clone_for_cleanup`; deriving or
    /// implementing `Clone` once would remove the duplication.
    fn clone_for_refresh(&self) -> Self {
        Self {
            config: self.config.clone(),
            cache: self.cache.clone(),
            stats: self.stats.clone(),
            refresh_callbacks: self.refresh_callbacks.clone(),
            cleanup_handle: self.cleanup_handle.clone(),
            notification_manager: self.notification_manager.clone(),
        }
    }
|
||||
}
|
||||
|
||||
/// Build a cache key by joining `service` and the optional `username` and
/// `resource` with `:` separators, e.g. `"github:alice:repo"` or just
/// `"github"` when the optional parts are absent.
pub fn create_cache_key(service: &str, username: Option<&str>, resource: Option<&str>) -> String {
    [Some(service), username, resource]
        .into_iter()
        .flatten()
        .collect::<Vec<&str>>()
        .join(":")
}
|
||||
|
||||
/// Structured error for authentication-cache operations, serializable so
/// it can cross the Tauri IPC boundary.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuthCacheError {
    /// Machine-readable error code.
    pub code: String,
    /// Human-readable description.
    pub message: String,
    /// Optional extra context, e.g. the offending key or scope.
    pub details: Option<HashMap<String, String>>,
}
|
||||
523
tauri/src-tauri/src/backend_manager.rs
Normal file
523
tauri/src-tauri/src/backend_manager.rs
Normal file
|
|
@ -0,0 +1,523 @@
|
|||
use serde::{Serialize, Deserialize};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use chrono::{DateTime, Utc};
|
||||
use tokio::process::Command;
|
||||
|
||||
/// Kind of server backend that can power VibeTunnel.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub enum BackendType {
    /// Built-in Rust server; always available, runs in-process.
    Rust,
    /// External Node.js server process.
    NodeJS,
    /// External Python server process.
    Python,
    /// External Go server process (not yet implemented).
    Go,
    /// User-provided backend not managed by this module.
    Custom,
}
|
||||
|
||||
impl BackendType {
|
||||
pub fn as_str(&self) -> &str {
|
||||
match self {
|
||||
BackendType::Rust => "rust",
|
||||
BackendType::NodeJS => "nodejs",
|
||||
BackendType::Python => "python",
|
||||
BackendType::Go => "go",
|
||||
BackendType::Custom => "custom",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_str(s: &str) -> Self {
|
||||
match s.to_lowercase().as_str() {
|
||||
"rust" => BackendType::Rust,
|
||||
"nodejs" | "node" => BackendType::NodeJS,
|
||||
"python" => BackendType::Python,
|
||||
"go" => BackendType::Go,
|
||||
_ => BackendType::Custom,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Lifecycle state of a backend instance, from installation through
/// running to stopped or failed.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
pub enum BackendStatus {
    NotInstalled,
    Installing,
    Installed,
    Starting,
    Running,
    Stopping,
    Stopped,
    /// Terminal failure state (e.g. launch failed).
    Error,
}
|
||||
|
||||
/// Static description of a backend: how to launch it, what it can do,
/// and what it needs from the host system.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackendConfig {
    pub backend_type: BackendType,
    /// Display name shown in the UI.
    pub name: String,
    pub version: String,
    /// Launch binary; `None` for the in-process Rust backend.
    pub executable_path: Option<PathBuf>,
    pub working_directory: Option<PathBuf>,
    /// Extra environment passed to the spawned process.
    pub environment_variables: HashMap<String, String>,
    /// Command-line arguments passed to the executable.
    pub arguments: Vec<String>,
    /// Port the backend listens on, if fixed.
    pub port: Option<u16>,
    pub features: BackendFeatures,
    pub requirements: BackendRequirements,
}
|
||||
|
||||
/// Capability flags advertised by a backend; used to drive feature
/// availability in the UI.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackendFeatures {
    pub terminal_sessions: bool,
    pub file_browser: bool,
    pub port_forwarding: bool,
    pub authentication: bool,
    pub websocket_support: bool,
    pub rest_api: bool,
    pub graphql_api: bool,
    /// Whether the backend exposes runtime metrics.
    pub metrics: bool,
}
|
||||
|
||||
/// Host prerequisites for installing/running a backend.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackendRequirements {
    /// Runtime binary expected on PATH (e.g. "node"); `None` if self-contained.
    pub runtime: Option<String>,
    /// Version constraint for the runtime (e.g. ">=16.0.0").
    pub runtime_version: Option<String>,
    /// Language-level package dependencies.
    pub dependencies: Vec<String>,
    /// OS-level packages, if any.
    pub system_packages: Vec<String>,
    pub min_memory_mb: Option<u32>,
    pub min_disk_space_mb: Option<u32>,
}
|
||||
|
||||
/// Runtime record for one launched backend: identity, lifecycle status,
/// health, and collected metrics.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackendInstance {
    /// UUID assigned at start time.
    pub id: String,
    pub backend_type: BackendType,
    pub status: BackendStatus,
    /// OS process id; `None` for the in-process Rust backend.
    pub pid: Option<u32>,
    pub port: u16,
    /// Set when the instance transitions to `Running`.
    pub started_at: Option<DateTime<Utc>>,
    pub last_health_check: Option<DateTime<Utc>>,
    pub health_status: HealthStatus,
    pub metrics: BackendMetrics,
}
|
||||
|
||||
/// Result of a backend health probe. `Unknown` covers never-probed and
/// not-running instances.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub enum HealthStatus {
    Healthy,
    Degraded,
    Unhealthy,
    Unknown,
}
|
||||
|
||||
/// Point-in-time metrics for a backend instance; optional fields are
/// `None` until a collector populates them.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackendMetrics {
    pub cpu_usage_percent: Option<f32>,
    pub memory_usage_mb: Option<u64>,
    /// Cumulative request count since start.
    pub request_count: u64,
    /// Cumulative error count since start.
    pub error_count: u64,
    pub average_response_time_ms: Option<f32>,
    pub active_connections: u32,
}
|
||||
|
||||
/// Orchestrates backend configurations and running instances; all shared
/// state is behind `Arc<RwLock<_>>` so clones/tasks can cooperate.
pub struct BackendManager {
    /// Known backend configurations, keyed by type.
    configs: Arc<RwLock<HashMap<BackendType, BackendConfig>>>,
    /// Launched instances, keyed by instance UUID.
    instances: Arc<RwLock<HashMap<String, BackendInstance>>>,
    /// The backend currently serving traffic, if any.
    active_backend: Arc<RwLock<Option<BackendType>>>,
    /// Optional sink for user-facing notifications (e.g. on switch).
    notification_manager: Option<Arc<crate::notification_manager::NotificationManager>>,
}
|
||||
|
||||
impl BackendManager {
|
||||
/// Create a new backend manager
|
||||
pub fn new() -> Self {
|
||||
let manager = Self {
|
||||
configs: Arc::new(RwLock::new(HashMap::new())),
|
||||
instances: Arc::new(RwLock::new(HashMap::new())),
|
||||
active_backend: Arc::new(RwLock::new(Some(BackendType::Rust))),
|
||||
notification_manager: None,
|
||||
};
|
||||
|
||||
// Initialize default backend configurations
|
||||
tokio::spawn({
|
||||
let configs = manager.configs.clone();
|
||||
async move {
|
||||
let default_configs = Self::initialize_default_configs();
|
||||
*configs.write().await = default_configs;
|
||||
}
|
||||
});
|
||||
|
||||
manager
|
||||
}
|
||||
|
||||
    /// Wire in the notification manager used for user-facing messages
    /// (e.g. the "Backend Switched" toast). Optional; without it,
    /// notifications are simply skipped.
    pub fn set_notification_manager(&mut self, notification_manager: Arc<crate::notification_manager::NotificationManager>) {
        self.notification_manager = Some(notification_manager);
    }
|
||||
|
||||
    /// Build the built-in configuration table for the Rust, Node.js and
    /// Python backends.
    ///
    /// Each backend gets a distinct default port (4020/4021/4022) so
    /// instances can coexist during a switch. Only the Rust backend has
    /// no external runtime requirement; Go/Custom have no defaults here.
    fn initialize_default_configs() -> HashMap<BackendType, BackendConfig> {
        let mut configs = HashMap::new();

        // Rust backend (built-in) — runs in-process; version tracks this crate.
        configs.insert(BackendType::Rust, BackendConfig {
            backend_type: BackendType::Rust,
            name: "Rust (Built-in)".to_string(),
            version: env!("CARGO_PKG_VERSION").to_string(),
            executable_path: None,
            working_directory: None,
            environment_variables: HashMap::new(),
            arguments: vec![],
            port: Some(4020),
            features: BackendFeatures {
                terminal_sessions: true,
                file_browser: true,
                port_forwarding: true,
                authentication: true,
                websocket_support: true,
                rest_api: true,
                graphql_api: false,
                metrics: true,
            },
            requirements: BackendRequirements {
                runtime: None,
                runtime_version: None,
                dependencies: vec![],
                system_packages: vec![],
                min_memory_mb: Some(64),
                min_disk_space_mb: Some(10),
            },
        });

        // Node.js backend — external `node server.js` process.
        configs.insert(BackendType::NodeJS, BackendConfig {
            backend_type: BackendType::NodeJS,
            name: "Node.js Server".to_string(),
            version: "1.0.0".to_string(),
            executable_path: Some(PathBuf::from("node")),
            working_directory: None,
            environment_variables: HashMap::new(),
            arguments: vec!["server.js".to_string()],
            port: Some(4021),
            features: BackendFeatures {
                terminal_sessions: true,
                file_browser: true,
                port_forwarding: false,
                authentication: true,
                websocket_support: true,
                rest_api: true,
                graphql_api: true,
                metrics: false,
            },
            requirements: BackendRequirements {
                runtime: Some("node".to_string()),
                runtime_version: Some(">=16.0.0".to_string()),
                dependencies: vec![
                    "express".to_string(),
                    "socket.io".to_string(),
                    "node-pty".to_string(),
                ],
                system_packages: vec![],
                min_memory_mb: Some(128),
                min_disk_space_mb: Some(50),
            },
        });

        // Python backend — external `python3 -m vibetunnel_server` process.
        configs.insert(BackendType::Python, BackendConfig {
            backend_type: BackendType::Python,
            name: "Python Server".to_string(),
            version: "1.0.0".to_string(),
            executable_path: Some(PathBuf::from("python3")),
            working_directory: None,
            environment_variables: HashMap::new(),
            arguments: vec!["-m".to_string(), "vibetunnel_server".to_string()],
            port: Some(4022),
            features: BackendFeatures {
                terminal_sessions: true,
                file_browser: true,
                port_forwarding: false,
                authentication: true,
                websocket_support: true,
                rest_api: true,
                graphql_api: false,
                metrics: true,
            },
            requirements: BackendRequirements {
                runtime: Some("python3".to_string()),
                runtime_version: Some(">=3.8".to_string()),
                dependencies: vec![
                    "fastapi".to_string(),
                    "uvicorn".to_string(),
                    "websockets".to_string(),
                    "ptyprocess".to_string(),
                ],
                system_packages: vec![],
                min_memory_mb: Some(96),
                min_disk_space_mb: Some(30),
            },
        });

        configs
    }
|
||||
|
||||
/// Get available backends
|
||||
pub async fn get_available_backends(&self) -> Vec<BackendConfig> {
|
||||
self.configs.read().await.values().cloned().collect()
|
||||
}
|
||||
|
||||
/// Get backend configuration
|
||||
pub async fn get_backend_config(&self, backend_type: BackendType) -> Option<BackendConfig> {
|
||||
self.configs.read().await.get(&backend_type).cloned()
|
||||
}
|
||||
|
||||
    /// Whether the given backend's runtime is available on this machine.
    ///
    /// The Rust backend is compiled in and always "installed"; Node.js,
    /// Python and Go probe for their runtime binary. `Custom` is never
    /// considered installed.
    pub async fn is_backend_installed(&self, backend_type: BackendType) -> bool {
        match backend_type {
            BackendType::Rust => true, // Built-in
            BackendType::NodeJS => self.check_nodejs_installed().await,
            BackendType::Python => self.check_python_installed().await,
            BackendType::Go => self.check_go_installed().await,
            BackendType::Custom => false,
        }
    }
|
||||
|
||||
    /// Install the given backend's runtime environment.
    ///
    /// The built-in Rust backend needs nothing and returns `Ok` directly;
    /// Node.js/Python delegate to (currently unimplemented) installers;
    /// Go and Custom are rejected.
    pub async fn install_backend(&self, backend_type: BackendType) -> Result<(), String> {
        match backend_type {
            BackendType::Rust => Ok(()), // Already installed
            BackendType::NodeJS => self.install_nodejs_backend().await,
            BackendType::Python => self.install_python_backend().await,
            BackendType::Go => Err("Go backend not yet implemented".to_string()),
            BackendType::Custom => Err("Custom backend installation not supported".to_string()),
        }
    }
|
||||
|
||||
/// Start backend
|
||||
pub async fn start_backend(&self, backend_type: BackendType) -> Result<String, String> {
|
||||
// Check if backend is installed
|
||||
if !self.is_backend_installed(backend_type).await {
|
||||
return Err(format!("{:?} backend is not installed", backend_type));
|
||||
}
|
||||
|
||||
// Get backend configuration
|
||||
let config = self.get_backend_config(backend_type).await
|
||||
.ok_or_else(|| "Backend configuration not found".to_string())?;
|
||||
|
||||
// Generate instance ID
|
||||
let instance_id = uuid::Uuid::new_v4().to_string();
|
||||
|
||||
// Create backend instance
|
||||
let instance = BackendInstance {
|
||||
id: instance_id.clone(),
|
||||
backend_type,
|
||||
status: BackendStatus::Starting,
|
||||
pid: None,
|
||||
port: config.port.unwrap_or(4020),
|
||||
started_at: None,
|
||||
last_health_check: None,
|
||||
health_status: HealthStatus::Unknown,
|
||||
metrics: BackendMetrics {
|
||||
cpu_usage_percent: None,
|
||||
memory_usage_mb: None,
|
||||
request_count: 0,
|
||||
error_count: 0,
|
||||
average_response_time_ms: None,
|
||||
active_connections: 0,
|
||||
},
|
||||
};
|
||||
|
||||
// Store instance
|
||||
self.instances.write().await.insert(instance_id.clone(), instance);
|
||||
|
||||
// Start backend process
|
||||
match backend_type {
|
||||
BackendType::Rust => {
|
||||
// Rust backend is handled internally
|
||||
self.update_instance_status(&instance_id, BackendStatus::Running).await;
|
||||
*self.active_backend.write().await = Some(BackendType::Rust);
|
||||
Ok(instance_id)
|
||||
}
|
||||
_ => {
|
||||
// Start external backend process
|
||||
self.start_external_backend(&instance_id, config).await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
    /// Stop a backend instance by id.
    ///
    /// The built-in Rust backend is only marked `Stopped` (it runs
    /// in-process); other types delegate to the external-process shutdown
    /// path (currently unimplemented). Errors if the id is unknown.
    pub async fn stop_backend(&self, instance_id: &str) -> Result<(), String> {
        let instance = self.instances.read().await
            .get(instance_id)
            .cloned()
            .ok_or_else(|| "Backend instance not found".to_string())?;

        match instance.backend_type {
            BackendType::Rust => {
                // Rust backend is handled internally
                self.update_instance_status(instance_id, BackendStatus::Stopped).await;
                Ok(())
            }
            _ => {
                // Stop external backend process
                self.stop_external_backend(instance_id).await
            }
        }
    }
|
||||
|
||||
    /// Switch the active backend: stop the currently-running instance of
    /// the old backend (if different), start the new one, record it as
    /// active, and emit a best-effort success notification.
    ///
    /// NOTE(review): switching to the backend that is already active still
    /// calls `start_backend`, creating a second instance — confirm whether
    /// that restart-on-reselect behavior is intended.
    pub async fn switch_backend(&self, backend_type: BackendType) -> Result<(), String> {
        // Stop current backend if different
        let current_backend = *self.active_backend.read().await;
        if let Some(current) = current_backend {
            if current != backend_type {
                // Find the running instance of the old backend; the read
                // lock is scoped so `stop_backend` can take its own locks.
                let instance_id = {
                    let instances = self.instances.read().await;
                    instances.iter()
                        .find(|(_, instance)| instance.backend_type == current && instance.status == BackendStatus::Running)
                        .map(|(id, _)| id.clone())
                };
                if let Some(id) = instance_id {
                    self.stop_backend(&id).await?;
                }
            }
        }

        // Start new backend
        self.start_backend(backend_type).await?;

        // Update active backend
        *self.active_backend.write().await = Some(backend_type);

        // Notify about backend switch (best-effort; errors are ignored).
        if let Some(notification_manager) = &self.notification_manager {
            let _ = notification_manager.notify_success(
                "Backend Switched",
                &format!("Switched to {:?} backend", backend_type)
            ).await;
        }

        Ok(())
    }
|
||||
|
||||
    /// The backend type currently recorded as active, if any.
    pub async fn get_active_backend(&self) -> Option<BackendType> {
        *self.active_backend.read().await
    }
|
||||
|
||||
/// Get backend instances
|
||||
pub async fn get_backend_instances(&self) -> Vec<BackendInstance> {
|
||||
self.instances.read().await.values().cloned().collect()
|
||||
}
|
||||
|
||||
    /// Probe an instance's health, persist the result on its record, and
    /// return it.
    ///
    /// Non-running instances report `Unknown` without probing. The
    /// built-in Rust backend is always considered `Healthy`. Errors if
    /// the id is unknown.
    pub async fn check_backend_health(&self, instance_id: &str) -> Result<HealthStatus, String> {
        let instance = self.instances.read().await
            .get(instance_id)
            .cloned()
            .ok_or_else(|| "Backend instance not found".to_string())?;

        if instance.status != BackendStatus::Running {
            return Ok(HealthStatus::Unknown);
        }

        // Perform health check based on backend type
        let health_status = match instance.backend_type {
            BackendType::Rust => HealthStatus::Healthy, // Always healthy for built-in
            _ => self.check_external_backend_health(&instance).await?,
        };

        // Update instance health status; the instance may have been removed
        // concurrently, in which case the result is still returned.
        if let Some(instance) = self.instances.write().await.get_mut(instance_id) {
            instance.health_status = health_status;
            instance.last_health_check = Some(Utc::now());
        }

        Ok(health_status)
    }
|
||||
|
||||
// Helper methods
|
||||
async fn check_nodejs_installed(&self) -> bool {
|
||||
Command::new("node")
|
||||
.arg("--version")
|
||||
.output()
|
||||
.await
|
||||
.map(|output| output.status.success())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
    /// True when `python3 --version` can be spawned and exits successfully.
    async fn check_python_installed(&self) -> bool {
        Command::new("python3")
            .arg("--version")
            .output()
            .await
            .map(|output| output.status.success())
            .unwrap_or(false)
    }
|
||||
|
||||
    /// True when `go version` can be spawned and exits successfully.
    async fn check_go_installed(&self) -> bool {
        Command::new("go")
            .arg("version")
            .output()
            .await
            .map(|output| output.status.success())
            .unwrap_or(false)
    }
|
||||
|
||||
    /// Install the Node.js backend. Stub: always errors until implemented.
    async fn install_nodejs_backend(&self) -> Result<(), String> {
        // TODO: Implement Node.js backend installation
        // This would involve:
        // 1. Creating package.json
        // 2. Installing dependencies
        // 3. Copying server files
        Err("Node.js backend installation not yet implemented".to_string())
    }
|
||||
|
||||
    /// Install the Python backend. Stub: always errors until implemented.
    async fn install_python_backend(&self) -> Result<(), String> {
        // TODO: Implement Python backend installation
        // This would involve:
        // 1. Creating virtual environment
        // 2. Installing pip dependencies
        // 3. Copying server files
        Err("Python backend installation not yet implemented".to_string())
    }
|
||||
|
||||
    /// Launch an external backend process for an already-registered
    /// instance. Stub: always errors until implemented.
    async fn start_external_backend(&self, _instance_id: &str, _config: BackendConfig) -> Result<String, String> {
        // TODO: Implement external backend startup
        Err("External backend startup not yet implemented".to_string())
    }
|
||||
|
||||
    /// Terminate an external backend process. Stub: always errors until
    /// implemented.
    async fn stop_external_backend(&self, _instance_id: &str) -> Result<(), String> {
        // TODO: Implement external backend shutdown
        Err("External backend shutdown not yet implemented".to_string())
    }
|
||||
|
||||
    /// Probe an external backend's health endpoint. Stub: reports
    /// `Unknown` until implemented.
    async fn check_external_backend_health(&self, _instance: &BackendInstance) -> Result<HealthStatus, String> {
        // TODO: Implement health check for external backends
        Ok(HealthStatus::Unknown)
    }
|
||||
|
||||
async fn update_instance_status(&self, instance_id: &str, status: BackendStatus) {
|
||||
if let Some(instance) = self.instances.write().await.get_mut(instance_id) {
|
||||
instance.status = status;
|
||||
if status == BackendStatus::Running {
|
||||
instance.started_at = Some(Utc::now());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Aggregate view over all backends, suitable for a status dashboard.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackendStats {
    /// Number of known backend configurations.
    pub total_backends: usize,
    /// How many of those are installed on this machine.
    pub installed_backends: usize,
    pub running_instances: usize,
    pub active_backend: Option<BackendType>,
    /// Instance count per health status.
    pub health_summary: HashMap<HealthStatus, usize>,
}
|
||||
364
tauri/src-tauri/src/cast.rs
Normal file
364
tauri/src-tauri/src/cast.rs
Normal file
|
|
@ -0,0 +1,364 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::fs::File;
|
||||
use std::io::{BufWriter, Write};
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex;
|
||||
use chrono::{DateTime, Utc};
|
||||
|
||||
/// Asciinema cast v2 format header — the first JSON line of a cast file.
/// Optional fields are omitted from the serialized header when unset.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CastHeader {
    /// Cast format version; always 2 for files written by this module.
    pub version: u8,
    /// Terminal width in columns.
    pub width: u16,
    /// Terminal height in rows.
    pub height: u16,
    /// Recording start as a Unix timestamp (seconds).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub timestamp: Option<i64>,
    /// Total duration in seconds; only known once recording is complete.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration: Option<f64>,
    /// Optional cap on recorded idle time between events.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub idle_time_limit: Option<f64>,
    /// Command that was recorded, if known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub command: Option<String>,
    /// Human-readable recording title.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    /// Selected environment variables captured at record time.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub env: Option<HashMap<String, String>>,
}
|
||||
|
||||
/// Event direction for Asciinema cast events: terminal output or user input.
#[derive(Debug, Clone, Copy)]
pub enum EventType {
    Output,
    Input,
}
|
||||
|
||||
impl EventType {
|
||||
fn as_str(&self) -> &'static str {
|
||||
match self {
|
||||
EventType::Output => "o",
|
||||
EventType::Input => "i",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A single event in the cast file, serialized as `[timestamp, type, data]`.
#[derive(Debug)]
pub struct CastEvent {
    /// Seconds since the recorder was created.
    pub timestamp: f64,
    pub event_type: EventType,
    /// Event payload, lossily decoded as UTF-8.
    pub data: String,
}
|
||||
|
||||
/// Records one terminal session in Asciinema cast v2 format.
///
/// Events are always buffered in memory; when recording to a file is
/// active they are additionally streamed to disk as they arrive.
pub struct CastRecorder {
    /// Header written as the first line of the cast file.
    header: CastHeader,
    /// Reference point for event timestamps.
    start_time: DateTime<Utc>,
    /// In-memory event log (kept even while streaming to a file).
    events: Arc<Mutex<Vec<CastEvent>>>,
    /// Live file sink; `Some` only while recording to disk.
    file_writer: Option<Arc<Mutex<BufWriter<File>>>>,
    is_recording: Arc<Mutex<bool>>,
}
|
||||
|
||||
impl CastRecorder {
|
||||
    /// Create a new cast recorder for a `width` x `height` terminal.
    ///
    /// The header's timestamp is fixed to "now", which also becomes the
    /// zero point for event timestamps. Recording to a file starts only
    /// when `start_recording` is called; events added before that are
    /// buffered in memory.
    pub fn new(
        width: u16,
        height: u16,
        title: Option<String>,
        command: Option<String>,
    ) -> Self {
        let now = Utc::now();
        let header = CastHeader {
            version: 2,
            width,
            height,
            timestamp: Some(now.timestamp()),
            // Duration is unknown until the recording completes.
            duration: None,
            idle_time_limit: None,
            command,
            title,
            env: None,
        };

        Self {
            header,
            start_time: now,
            events: Arc::new(Mutex::new(Vec::new())),
            file_writer: None,
            is_recording: Arc::new(Mutex::new(false)),
        }
    }
|
||||
|
||||
    /// Begin streaming this recording to `path`.
    ///
    /// Writes the asciinema v2 header line, replays any events buffered
    /// before the file existed, and keeps the file open for incremental
    /// writes from `add_event`. Fails if already recording or the file
    /// cannot be created.
    ///
    /// NOTE(review): uses blocking `std::fs`/`std::io` inside an async fn;
    /// fine for small writes, but consider `tokio::fs`/`spawn_blocking`
    /// if cast files can grow large.
    pub async fn start_recording(&mut self, path: impl AsRef<Path>) -> Result<(), String> {
        // Holding this lock for the whole setup guards against a
        // concurrent start on the shared flag.
        let mut is_recording = self.is_recording.lock().await;
        if *is_recording {
            return Err("Already recording".to_string());
        }

        // Create file and write header
        let file = File::create(path.as_ref())
            .map_err(|e| format!("Failed to create cast file: {}", e))?;
        let mut writer = BufWriter::new(file);

        // Write header as first line (cast v2 is newline-delimited JSON).
        let header_json = serde_json::to_string(&self.header)
            .map_err(|e| format!("Failed to serialize header: {}", e))?;
        writeln!(writer, "{}", header_json)
            .map_err(|e| format!("Failed to write header: {}", e))?;

        // Write any events captured before recording to file started.
        let events = self.events.lock().await;
        for event in events.iter() {
            self.write_event_to_file(&mut writer, event)?;
        }

        writer.flush()
            .map_err(|e| format!("Failed to flush writer: {}", e))?;

        self.file_writer = Some(Arc::new(Mutex::new(writer)));
        *is_recording = true;
        Ok(())
    }
|
||||
|
||||
    /// Stop recording to the file, flushing and dropping the writer
    /// (which closes the file). Idempotent: returns `Ok` when not
    /// currently recording. In-memory events are retained.
    pub async fn stop_recording(&mut self) -> Result<(), String> {
        let mut is_recording = self.is_recording.lock().await;
        if !*is_recording {
            return Ok(());
        }

        // `take()` releases our handle; the file closes once the last
        // Arc reference is dropped.
        if let Some(writer_arc) = self.file_writer.take() {
            let mut writer = writer_arc.lock().await;
            writer.flush()
                .map_err(|e| format!("Failed to flush final data: {}", e))?;
        }

        *is_recording = false;
        Ok(())
    }
|
||||
|
||||
    /// Record terminal output bytes (an "o" event).
    pub async fn add_output(&self, data: &[u8]) -> Result<(), String> {
        self.add_event(EventType::Output, data).await
    }
|
||||
|
||||
    /// Record user input bytes (an "i" event).
    pub async fn add_input(&self, data: &[u8]) -> Result<(), String> {
        self.add_event(EventType::Input, data).await
    }
|
||||
|
||||
    /// Append one event, stamped with seconds elapsed since the recorder
    /// was created (millisecond precision).
    ///
    /// Bytes are decoded with `from_utf8_lossy`, so invalid UTF-8 is
    /// replaced rather than failing. When a live file writer exists the
    /// event is written and flushed immediately; the event is always also
    /// kept in memory for `save_to_file` / `get_duration`.
    async fn add_event(&self, event_type: EventType, data: &[u8]) -> Result<(), String> {
        let timestamp = Utc::now()
            .signed_duration_since(self.start_time)
            .num_milliseconds() as f64 / 1000.0;

        // Convert data to string (handling potential UTF-8 errors)
        let data_string = String::from_utf8_lossy(data).to_string();

        let event = CastEvent {
            timestamp,
            event_type,
            data: data_string,
        };

        // If we have a file writer, write immediately (flushed per event so
        // the cast file is usable even after a crash).
        if let Some(writer_arc) = &self.file_writer {
            let mut writer = writer_arc.lock().await;
            self.write_event_to_file(&mut writer, &event)?;
            writer.flush()
                .map_err(|e| format!("Failed to flush event: {}", e))?;
        }

        // Also store in memory
        let mut events = self.events.lock().await;
        events.push(event);

        Ok(())
    }
|
||||
|
||||
/// Write an event to the file
|
||||
fn write_event_to_file(
|
||||
&self,
|
||||
writer: &mut BufWriter<File>,
|
||||
event: &CastEvent,
|
||||
) -> Result<(), String> {
|
||||
// Format: [timestamp, event_type, data]
|
||||
let event_array = serde_json::json!([
|
||||
event.timestamp,
|
||||
event.event_type.as_str(),
|
||||
event.data
|
||||
]);
|
||||
|
||||
writeln!(writer, "{}", event_array)
|
||||
.map_err(|e| format!("Failed to write event: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
    /// Write the full in-memory recording to `path` as a standalone cast
    /// file, including a header whose `duration` is the timestamp of the
    /// last event (absent when there are no events).
    ///
    /// Independent of live recording: it neither requires nor affects the
    /// `start_recording`/`stop_recording` state.
    pub async fn save_to_file(&self, path: impl AsRef<Path>) -> Result<(), String> {
        let file = File::create(path.as_ref())
            .map_err(|e| format!("Failed to create cast file: {}", e))?;
        let mut writer = BufWriter::new(file);

        // Calculate duration from the last event's timestamp.
        let events = self.events.lock().await;
        let duration = events.last().map(|e| e.timestamp);

        // Update a copy of the header with the duration; the live header
        // stays unmodified.
        let mut header = self.header.clone();
        header.duration = duration;

        // Write header
        let header_json = serde_json::to_string(&header)
            .map_err(|e| format!("Failed to serialize header: {}", e))?;
        writeln!(writer, "{}", header_json)
            .map_err(|e| format!("Failed to write header: {}", e))?;

        // Write events
        for event in events.iter() {
            self.write_event_to_file(&mut writer, event)?;
        }

        writer.flush()
            .map_err(|e| format!("Failed to flush file: {}", e))?;

        Ok(())
    }
|
||||
|
||||
/// Get the current recording duration
|
||||
pub async fn get_duration(&self) -> f64 {
|
||||
let events = self.events.lock().await;
|
||||
events.last().map(|e| e.timestamp).unwrap_or(0.0)
|
||||
}
|
||||
|
||||
    /// Whether the recorder is currently streaming to a file.
    pub async fn is_recording(&self) -> bool {
        *self.is_recording.lock().await
    }
|
||||
|
||||
    /// Update the header's terminal dimensions.
    ///
    /// Only affects headers written after this call; no asciinema "r"
    /// resize event is emitted, so mid-recording resizes are not
    /// represented in the event stream (noted by the original author).
    pub async fn resize(&mut self, width: u16, height: u16) {
        self.header.width = width;
        self.header.height = height;
        // Note: In a real implementation, you might want to add a resize event
    }
|
||||
}
|
||||
|
||||
/// Manages cast recordings for multiple sessions
pub struct CastManager {
    // session id -> shared recorder. The inner Arc<Mutex<..>> lets callers
    // hold one recorder without keeping the whole session map locked.
    recorders: Arc<Mutex<HashMap<String, Arc<Mutex<CastRecorder>>>>>,
}
|
||||
|
||||
impl CastManager {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
recorders: Arc::new(Mutex::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new recorder for a session
|
||||
pub async fn create_recorder(
|
||||
&self,
|
||||
session_id: String,
|
||||
width: u16,
|
||||
height: u16,
|
||||
title: Option<String>,
|
||||
command: Option<String>,
|
||||
) -> Result<(), String> {
|
||||
let mut recorders = self.recorders.lock().await;
|
||||
if recorders.contains_key(&session_id) {
|
||||
return Err("Recorder already exists for this session".to_string());
|
||||
}
|
||||
|
||||
let recorder = CastRecorder::new(width, height, title, command);
|
||||
recorders.insert(session_id, Arc::new(Mutex::new(recorder)));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get a recorder for a session
|
||||
pub async fn get_recorder(&self, session_id: &str) -> Option<Arc<Mutex<CastRecorder>>> {
|
||||
self.recorders.lock().await.get(session_id).cloned()
|
||||
}
|
||||
|
||||
/// Remove a recorder for a session
|
||||
pub async fn remove_recorder(&self, session_id: &str) -> Result<(), String> {
|
||||
let mut recorders = self.recorders.lock().await;
|
||||
if let Some(recorder_arc) = recorders.remove(session_id) {
|
||||
let mut recorder = recorder_arc.lock().await;
|
||||
recorder.stop_recording().await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Start recording for a session
|
||||
pub async fn start_recording(
|
||||
&self,
|
||||
session_id: &str,
|
||||
path: impl AsRef<Path>,
|
||||
) -> Result<(), String> {
|
||||
if let Some(recorder_arc) = self.get_recorder(session_id).await {
|
||||
let mut recorder = recorder_arc.lock().await;
|
||||
recorder.start_recording(path).await
|
||||
} else {
|
||||
Err("No recorder found for session".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// Stop recording for a session
|
||||
pub async fn stop_recording(&self, session_id: &str) -> Result<(), String> {
|
||||
if let Some(recorder_arc) = self.get_recorder(session_id).await {
|
||||
let mut recorder = recorder_arc.lock().await;
|
||||
recorder.stop_recording().await
|
||||
} else {
|
||||
Err("No recorder found for session".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// Add output to a session's recording
|
||||
pub async fn add_output(&self, session_id: &str, data: &[u8]) -> Result<(), String> {
|
||||
if let Some(recorder_arc) = self.get_recorder(session_id).await {
|
||||
let recorder = recorder_arc.lock().await;
|
||||
recorder.add_output(data).await
|
||||
} else {
|
||||
Ok(()) // Silently ignore if no recorder
|
||||
}
|
||||
}
|
||||
|
||||
/// Add input to a session's recording
|
||||
pub async fn add_input(&self, session_id: &str, data: &[u8]) -> Result<(), String> {
|
||||
if let Some(recorder_arc) = self.get_recorder(session_id).await {
|
||||
let recorder = recorder_arc.lock().await;
|
||||
recorder.add_input(data).await
|
||||
} else {
|
||||
Ok(()) // Silently ignore if no recorder
|
||||
}
|
||||
}
|
||||
|
||||
/// Save a session's recording to file
|
||||
pub async fn save_recording(
|
||||
&self,
|
||||
session_id: &str,
|
||||
path: impl AsRef<Path>,
|
||||
) -> Result<(), String> {
|
||||
if let Some(recorder_arc) = self.get_recorder(session_id).await {
|
||||
let recorder = recorder_arc.lock().await;
|
||||
recorder.save_to_file(path).await
|
||||
} else {
|
||||
Err("No recorder found for session".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if a session is being recorded
|
||||
pub async fn is_recording(&self, session_id: &str) -> bool {
|
||||
if let Some(recorder_arc) = self.get_recorder(session_id).await {
|
||||
let recorder = recorder_arc.lock().await;
|
||||
recorder.is_recording().await
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load diff
648
tauri/src-tauri/src/debug_features.rs
Normal file
648
tauri/src-tauri/src/debug_features.rs
Normal file
|
|
@ -0,0 +1,648 @@
|
|||
use serde::{Serialize, Deserialize};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
use std::collections::{HashMap, VecDeque};
|
||||
use chrono::{DateTime, Utc};
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Debug feature types
///
/// Identifies one debug tool exposed by [`DebugFeaturesManager`].
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub enum DebugFeature {
    APITesting,
    PerformanceMonitoring,
    MemoryProfiling,
    NetworkInspector,
    EventLogger,
    StateInspector,
    LogViewer,
    CrashReporter,
    BenchmarkRunner,
    DiagnosticReport,
}
|
||||
|
||||
/// Debug log level
///
/// Variants are ordered most- to least-verbose; the derived `Ord` makes
/// threshold checks like `level < settings.log_level` work directly.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub enum LogLevel {
    Trace,
    Debug,
    Info,
    Warn,
    Error,
}
|
||||
|
||||
/// Debug log entry
///
/// One structured record in the in-memory log buffer.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogEntry {
    pub timestamp: DateTime<Utc>,
    pub level: LogLevel,
    // Subsystem that emitted the entry.
    pub component: String,
    pub message: String,
    // Free-form structured context; a "type" key is used for error grouping
    // in the diagnostic report's error summary.
    pub metadata: HashMap<String, serde_json::Value>,
}
|
||||
|
||||
/// Performance metric
///
/// A single named measurement sample (value + unit) with optional tags.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceMetric {
    pub name: String,
    pub value: f64,
    pub unit: String,
    pub timestamp: DateTime<Utc>,
    pub tags: HashMap<String, String>,
}
|
||||
|
||||
/// Memory snapshot
///
/// Point-in-time memory figures, all in megabytes. Currently populated with
/// zeros by `take_memory_snapshot` (real profiling is a TODO there).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MemorySnapshot {
    pub timestamp: DateTime<Utc>,
    pub heap_used_mb: f64,
    pub heap_total_mb: f64,
    pub external_mb: f64,
    pub process_rss_mb: f64,
    // Extra per-category breakdown, keyed by category name.
    pub details: HashMap<String, f64>,
}
|
||||
|
||||
/// Network request log
///
/// Captured request/response pair for the network inspector. `id` doubles
/// as the key in the manager's request map.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NetworkRequest {
    pub id: String,
    pub timestamp: DateTime<Utc>,
    pub method: String,
    pub url: String,
    pub status: Option<u16>,
    pub duration_ms: Option<u64>,
    pub request_headers: HashMap<String, String>,
    pub response_headers: HashMap<String, String>,
    pub request_body: Option<String>,
    pub response_body: Option<String>,
    pub error: Option<String>,
}
|
||||
|
||||
/// API test case
///
/// Declarative description of one HTTP request plus its expected outcome.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct APITestCase {
    pub id: String,
    pub name: String,
    pub endpoint: String,
    pub method: String,
    pub headers: HashMap<String, String>,
    pub body: Option<serde_json::Value>,
    pub expected_status: u16,
    // When None, only the status code is checked.
    pub expected_body: Option<serde_json::Value>,
    pub timeout_ms: u64,
}
|
||||
|
||||
/// API test result
///
/// Outcome of running one [`APITestCase`], keyed back to it via `test_id`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct APITestResult {
    pub test_id: String,
    pub success: bool,
    pub actual_status: Option<u16>,
    pub actual_body: Option<serde_json::Value>,
    pub duration_ms: u64,
    pub error: Option<String>,
    pub timestamp: DateTime<Utc>,
}
|
||||
|
||||
/// Benchmark configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BenchmarkConfig {
    pub name: String,
    // Measured iterations.
    pub iterations: u32,
    // Iterations run before measurement starts.
    pub warmup_iterations: u32,
    pub timeout_ms: u64,
    pub collect_memory: bool,
    pub collect_cpu: bool,
}
|
||||
|
||||
/// Benchmark result
///
/// Aggregated timing statistics (all in milliseconds) for one benchmark run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BenchmarkResult {
    pub name: String,
    pub iterations: u32,
    pub mean_ms: f64,
    pub median_ms: f64,
    pub min_ms: f64,
    pub max_ms: f64,
    pub std_dev_ms: f64,
    // Only filled when the config enabled memory/cpu collection.
    pub memory_usage_mb: Option<f64>,
    pub cpu_usage_percent: Option<f64>,
    pub timestamp: DateTime<Utc>,
}
|
||||
|
||||
/// Diagnostic report
///
/// Full snapshot assembled by `DebugFeaturesManager::generate_diagnostic_report`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DiagnosticReport {
    pub timestamp: DateTime<Utc>,
    pub system_info: SystemInfo,
    pub app_info: AppInfo,
    pub performance_summary: PerformanceSummary,
    pub error_summary: ErrorSummary,
    // Human-readable suggestions derived from the summaries above.
    pub recommendations: Vec<String>,
}
|
||||
|
||||
/// System information
///
/// Host facts for the diagnostic report. Several fields are currently
/// placeholders (zeros / hard-coded) — see `get_system_info`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SystemInfo {
    pub os: String,
    pub arch: String,
    pub cpu_count: usize,
    pub total_memory_mb: u64,
    pub available_memory_mb: u64,
    pub disk_space_mb: u64,
    pub node_version: Option<String>,
    pub rust_version: String,
}
|
||||
|
||||
/// Application information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AppInfo {
    pub version: String,
    pub build_date: String,
    pub uptime_seconds: u64,
    pub active_sessions: usize,
    pub total_requests: u64,
    pub error_count: u64,
}
|
||||
|
||||
/// Performance summary
///
/// Aggregate performance figures for the diagnostic report; thresholds on
/// these fields drive `generate_recommendations`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceSummary {
    pub avg_response_time_ms: f64,
    pub p95_response_time_ms: f64,
    pub p99_response_time_ms: f64,
    pub requests_per_second: f64,
    pub cpu_usage_percent: f64,
    pub memory_usage_mb: f64,
}
|
||||
|
||||
/// Error summary
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ErrorSummary {
    pub total_errors: u64,
    // Counts grouped by the "type" key of each error's metadata.
    pub errors_by_type: HashMap<String, u64>,
    // Most recent error entries (newest first, up to 10).
    pub recent_errors: Vec<LogEntry>,
}
|
||||
|
||||
/// Debug settings
///
/// Master switches and limits for all debug features. `enabled` gates every
/// collector; the per-feature flags gate their respective subsystems.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DebugSettings {
    pub enabled: bool,
    // Minimum level an entry must reach to be recorded.
    pub log_level: LogLevel,
    // Ring-buffer cap for in-memory log entries.
    pub max_log_entries: usize,
    pub enable_performance_monitoring: bool,
    pub enable_memory_profiling: bool,
    pub enable_network_inspector: bool,
    pub enable_crash_reporting: bool,
    pub log_to_file: bool,
    // Required when log_to_file is true; entries are appended here.
    pub log_file_path: Option<PathBuf>,
    pub performance_sample_interval_ms: u64,
    pub memory_sample_interval_ms: u64,
}
|
||||
|
||||
impl Default for DebugSettings {
    fn default() -> Self {
        Self {
            // Debug features default to on only in debug builds.
            enabled: cfg!(debug_assertions),
            log_level: LogLevel::Info,
            max_log_entries: 10000,
            enable_performance_monitoring: false,
            enable_memory_profiling: false,
            enable_network_inspector: false,
            // Crash reporting stays on even in release builds.
            enable_crash_reporting: true,
            log_to_file: false,
            log_file_path: None,
            performance_sample_interval_ms: 1000,
            memory_sample_interval_ms: 5000,
        }
    }
}
|
||||
|
||||
/// Debug features manager
///
/// Central store for all debug data. Each collection sits behind its own
/// `RwLock` so readers of one feature don't block writers of another.
pub struct DebugFeaturesManager {
    settings: Arc<RwLock<DebugSettings>>,
    // In-memory ring buffer, capped at settings.max_log_entries.
    logs: Arc<RwLock<VecDeque<LogEntry>>>,
    performance_metrics: Arc<RwLock<VecDeque<PerformanceMetric>>>,
    memory_snapshots: Arc<RwLock<VecDeque<MemorySnapshot>>>,
    // Keyed by request id.
    network_requests: Arc<RwLock<HashMap<String, NetworkRequest>>>,
    // Per-test-case result history, keyed by test id.
    api_test_results: Arc<RwLock<HashMap<String, Vec<APITestResult>>>>,
    benchmark_results: Arc<RwLock<Vec<BenchmarkResult>>>,
    // Optional; used to surface debug-mode toggles to the user.
    notification_manager: Option<Arc<crate::notification_manager::NotificationManager>>,
}
|
||||
|
||||
impl DebugFeaturesManager {
|
||||
/// Create a new debug features manager
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
settings: Arc::new(RwLock::new(DebugSettings::default())),
|
||||
logs: Arc::new(RwLock::new(VecDeque::new())),
|
||||
performance_metrics: Arc::new(RwLock::new(VecDeque::new())),
|
||||
memory_snapshots: Arc::new(RwLock::new(VecDeque::new())),
|
||||
network_requests: Arc::new(RwLock::new(HashMap::new())),
|
||||
api_test_results: Arc::new(RwLock::new(HashMap::new())),
|
||||
benchmark_results: Arc::new(RwLock::new(Vec::new())),
|
||||
notification_manager: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Set the notification manager
|
||||
pub fn set_notification_manager(&mut self, notification_manager: Arc<crate::notification_manager::NotificationManager>) {
|
||||
self.notification_manager = Some(notification_manager);
|
||||
}
|
||||
|
||||
/// Get debug settings
|
||||
pub async fn get_settings(&self) -> DebugSettings {
|
||||
self.settings.read().await.clone()
|
||||
}
|
||||
|
||||
/// Update debug settings
|
||||
pub async fn update_settings(&self, settings: DebugSettings) {
|
||||
*self.settings.write().await = settings;
|
||||
}
|
||||
|
||||
/// Log a message
|
||||
pub async fn log(&self, level: LogLevel, component: &str, message: &str, metadata: HashMap<String, serde_json::Value>) {
|
||||
let settings = self.settings.read().await;
|
||||
|
||||
// Check if logging is enabled and level is appropriate
|
||||
if !settings.enabled || level < settings.log_level {
|
||||
return;
|
||||
}
|
||||
|
||||
let entry = LogEntry {
|
||||
timestamp: Utc::now(),
|
||||
level,
|
||||
component: component.to_string(),
|
||||
message: message.to_string(),
|
||||
metadata,
|
||||
};
|
||||
|
||||
// Add to in-memory log
|
||||
let mut logs = self.logs.write().await;
|
||||
logs.push_back(entry.clone());
|
||||
|
||||
// Limit log size
|
||||
while logs.len() > settings.max_log_entries {
|
||||
logs.pop_front();
|
||||
}
|
||||
|
||||
// Log to file if enabled
|
||||
if settings.log_to_file {
|
||||
if let Some(path) = &settings.log_file_path {
|
||||
let _ = self.write_log_to_file(&entry, path).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Record a performance metric
|
||||
pub async fn record_metric(&self, name: &str, value: f64, unit: &str, tags: HashMap<String, String>) {
|
||||
let settings = self.settings.read().await;
|
||||
|
||||
if !settings.enabled || !settings.enable_performance_monitoring {
|
||||
return;
|
||||
}
|
||||
|
||||
let metric = PerformanceMetric {
|
||||
name: name.to_string(),
|
||||
value,
|
||||
unit: unit.to_string(),
|
||||
timestamp: Utc::now(),
|
||||
tags,
|
||||
};
|
||||
|
||||
let mut metrics = self.performance_metrics.write().await;
|
||||
metrics.push_back(metric);
|
||||
|
||||
// Keep only last 1000 metrics
|
||||
while metrics.len() > 1000 {
|
||||
metrics.pop_front();
|
||||
}
|
||||
}
|
||||
|
||||
/// Take a memory snapshot
|
||||
pub async fn take_memory_snapshot(&self) -> Result<MemorySnapshot, String> {
|
||||
let settings = self.settings.read().await;
|
||||
|
||||
if !settings.enabled || !settings.enable_memory_profiling {
|
||||
return Err("Memory profiling is disabled".to_string());
|
||||
}
|
||||
|
||||
// TODO: Implement actual memory profiling
|
||||
let snapshot = MemorySnapshot {
|
||||
timestamp: Utc::now(),
|
||||
heap_used_mb: 0.0,
|
||||
heap_total_mb: 0.0,
|
||||
external_mb: 0.0,
|
||||
process_rss_mb: 0.0,
|
||||
details: HashMap::new(),
|
||||
};
|
||||
|
||||
let mut snapshots = self.memory_snapshots.write().await;
|
||||
snapshots.push_back(snapshot.clone());
|
||||
|
||||
// Keep only last 100 snapshots
|
||||
while snapshots.len() > 100 {
|
||||
snapshots.pop_front();
|
||||
}
|
||||
|
||||
Ok(snapshot)
|
||||
}
|
||||
|
||||
/// Log a network request
|
||||
pub async fn log_network_request(&self, request: NetworkRequest) {
|
||||
let settings = self.settings.read().await;
|
||||
|
||||
if !settings.enabled || !settings.enable_network_inspector {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut requests = self.network_requests.write().await;
|
||||
requests.insert(request.id.clone(), request);
|
||||
|
||||
// Keep only last 500 requests
|
||||
if requests.len() > 500 {
|
||||
// Remove oldest entries
|
||||
let mut ids: Vec<_> = requests.keys().cloned().collect();
|
||||
ids.sort();
|
||||
for id in ids.iter().take(requests.len() - 500) {
|
||||
requests.remove(id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Run API tests
|
||||
pub async fn run_api_tests(&self, tests: Vec<APITestCase>) -> Vec<APITestResult> {
|
||||
let mut results = Vec::new();
|
||||
|
||||
for test in tests {
|
||||
let result = self.run_single_api_test(&test).await;
|
||||
results.push(result.clone());
|
||||
|
||||
// Store result
|
||||
let mut test_results = self.api_test_results.write().await;
|
||||
test_results.entry(test.id.clone())
|
||||
.or_insert_with(Vec::new)
|
||||
.push(result);
|
||||
}
|
||||
|
||||
results
|
||||
}
|
||||
|
||||
/// Run a single API test
|
||||
async fn run_single_api_test(&self, test: &APITestCase) -> APITestResult {
|
||||
let start = std::time::Instant::now();
|
||||
|
||||
// TODO: Implement actual API testing
|
||||
let duration_ms = start.elapsed().as_millis() as u64;
|
||||
|
||||
APITestResult {
|
||||
test_id: test.id.clone(),
|
||||
success: false,
|
||||
actual_status: None,
|
||||
actual_body: None,
|
||||
duration_ms,
|
||||
error: Some("API testing not yet implemented".to_string()),
|
||||
timestamp: Utc::now(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Run benchmarks
|
||||
pub async fn run_benchmarks(&self, configs: Vec<BenchmarkConfig>) -> Vec<BenchmarkResult> {
|
||||
let mut results = Vec::new();
|
||||
|
||||
for config in configs {
|
||||
let result = self.run_single_benchmark(&config).await;
|
||||
results.push(result.clone());
|
||||
|
||||
// Store result
|
||||
self.benchmark_results.write().await.push(result);
|
||||
}
|
||||
|
||||
results
|
||||
}
|
||||
|
||||
/// Run a single benchmark
|
||||
async fn run_single_benchmark(&self, config: &BenchmarkConfig) -> BenchmarkResult {
|
||||
// TODO: Implement actual benchmarking
|
||||
BenchmarkResult {
|
||||
name: config.name.clone(),
|
||||
iterations: config.iterations,
|
||||
mean_ms: 0.0,
|
||||
median_ms: 0.0,
|
||||
min_ms: 0.0,
|
||||
max_ms: 0.0,
|
||||
std_dev_ms: 0.0,
|
||||
memory_usage_mb: None,
|
||||
cpu_usage_percent: None,
|
||||
timestamp: Utc::now(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate diagnostic report
|
||||
pub async fn generate_diagnostic_report(&self) -> DiagnosticReport {
|
||||
let system_info = self.get_system_info().await;
|
||||
let app_info = self.get_app_info().await;
|
||||
let performance_summary = self.get_performance_summary().await;
|
||||
let error_summary = self.get_error_summary().await;
|
||||
let recommendations = self.generate_recommendations(&system_info, &app_info, &performance_summary, &error_summary);
|
||||
|
||||
DiagnosticReport {
|
||||
timestamp: Utc::now(),
|
||||
system_info,
|
||||
app_info,
|
||||
performance_summary,
|
||||
error_summary,
|
||||
recommendations,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get recent logs
|
||||
pub async fn get_logs(&self, limit: Option<usize>, level: Option<LogLevel>) -> Vec<LogEntry> {
|
||||
let logs = self.logs.read().await;
|
||||
let iter = logs.iter().rev();
|
||||
|
||||
let filtered: Vec<_> = if let Some(min_level) = level {
|
||||
iter.filter(|log| log.level >= min_level).cloned().collect()
|
||||
} else {
|
||||
iter.cloned().collect()
|
||||
};
|
||||
|
||||
match limit {
|
||||
Some(n) => filtered.into_iter().take(n).collect(),
|
||||
None => filtered,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get performance metrics
|
||||
pub async fn get_performance_metrics(&self, limit: Option<usize>) -> Vec<PerformanceMetric> {
|
||||
let metrics = self.performance_metrics.read().await;
|
||||
match limit {
|
||||
Some(n) => metrics.iter().rev().take(n).cloned().collect(),
|
||||
None => metrics.iter().cloned().collect(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get memory snapshots
|
||||
pub async fn get_memory_snapshots(&self, limit: Option<usize>) -> Vec<MemorySnapshot> {
|
||||
let snapshots = self.memory_snapshots.read().await;
|
||||
match limit {
|
||||
Some(n) => snapshots.iter().rev().take(n).cloned().collect(),
|
||||
None => snapshots.iter().cloned().collect(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get network requests
|
||||
pub async fn get_network_requests(&self, limit: Option<usize>) -> Vec<NetworkRequest> {
|
||||
let requests = self.network_requests.read().await;
|
||||
let mut sorted: Vec<_> = requests.values().cloned().collect();
|
||||
sorted.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
|
||||
|
||||
match limit {
|
||||
Some(n) => sorted.into_iter().take(n).collect(),
|
||||
None => sorted,
|
||||
}
|
||||
}
|
||||
|
||||
/// Clear all debug data
|
||||
pub async fn clear_all_data(&self) {
|
||||
self.logs.write().await.clear();
|
||||
self.performance_metrics.write().await.clear();
|
||||
self.memory_snapshots.write().await.clear();
|
||||
self.network_requests.write().await.clear();
|
||||
self.api_test_results.write().await.clear();
|
||||
self.benchmark_results.write().await.clear();
|
||||
}
|
||||
|
||||
/// Enable/disable debug mode
|
||||
pub async fn set_debug_mode(&self, enabled: bool) {
|
||||
self.settings.write().await.enabled = enabled;
|
||||
|
||||
if let Some(notification_manager) = &self.notification_manager {
|
||||
let message = if enabled {
|
||||
"Debug mode enabled"
|
||||
} else {
|
||||
"Debug mode disabled"
|
||||
};
|
||||
let _ = notification_manager.notify_success("Debug Mode", message).await;
|
||||
}
|
||||
}
|
||||
|
||||
// Helper methods
|
||||
async fn write_log_to_file(&self, entry: &LogEntry, path: &PathBuf) -> Result<(), String> {
|
||||
use tokio::io::AsyncWriteExt;
|
||||
|
||||
let log_line = format!(
|
||||
"[{}] [{}] [{}] {}\n",
|
||||
entry.timestamp.format("%Y-%m-%d %H:%M:%S%.3f"),
|
||||
format!("{:?}", entry.level),
|
||||
entry.component,
|
||||
entry.message
|
||||
);
|
||||
|
||||
let mut file = tokio::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open(path)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
file.write_all(log_line.as_bytes()).await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn get_system_info(&self) -> SystemInfo {
|
||||
SystemInfo {
|
||||
os: std::env::consts::OS.to_string(),
|
||||
arch: std::env::consts::ARCH.to_string(),
|
||||
cpu_count: num_cpus::get(),
|
||||
total_memory_mb: 0, // TODO: Get actual memory
|
||||
available_memory_mb: 0,
|
||||
disk_space_mb: 0,
|
||||
node_version: None,
|
||||
rust_version: "1.70.0".to_string(), // TODO: Get actual rust version
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_app_info(&self) -> AppInfo {
|
||||
AppInfo {
|
||||
version: env!("CARGO_PKG_VERSION").to_string(),
|
||||
build_date: chrono::Utc::now().to_rfc3339(), // TODO: Get actual build date
|
||||
uptime_seconds: 0, // TODO: Track uptime
|
||||
active_sessions: 0,
|
||||
total_requests: 0,
|
||||
error_count: 0,
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_performance_summary(&self) -> PerformanceSummary {
|
||||
PerformanceSummary {
|
||||
avg_response_time_ms: 0.0,
|
||||
p95_response_time_ms: 0.0,
|
||||
p99_response_time_ms: 0.0,
|
||||
requests_per_second: 0.0,
|
||||
cpu_usage_percent: 0.0,
|
||||
memory_usage_mb: 0.0,
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_error_summary(&self) -> ErrorSummary {
|
||||
let logs = self.logs.read().await;
|
||||
let errors: Vec<_> = logs.iter()
|
||||
.filter(|log| log.level == LogLevel::Error)
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
let mut errors_by_type = HashMap::new();
|
||||
for error in &errors {
|
||||
let error_type = error.metadata.get("type")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("unknown")
|
||||
.to_string();
|
||||
*errors_by_type.entry(error_type).or_insert(0) += 1;
|
||||
}
|
||||
|
||||
ErrorSummary {
|
||||
total_errors: errors.len() as u64,
|
||||
errors_by_type,
|
||||
recent_errors: errors.into_iter().rev().take(10).collect(),
|
||||
}
|
||||
}
|
||||
|
||||
fn generate_recommendations(&self, system: &SystemInfo, _app: &AppInfo, perf: &PerformanceSummary, errors: &ErrorSummary) -> Vec<String> {
|
||||
let mut recommendations = Vec::new();
|
||||
|
||||
if perf.cpu_usage_percent > 80.0 {
|
||||
recommendations.push("High CPU usage detected. Consider optimizing performance-critical code.".to_string());
|
||||
}
|
||||
|
||||
if perf.memory_usage_mb > (system.total_memory_mb as f64 * 0.8) {
|
||||
recommendations.push("High memory usage detected. Check for memory leaks.".to_string());
|
||||
}
|
||||
|
||||
if errors.total_errors > 100 {
|
||||
recommendations.push("High error rate detected. Review error logs for patterns.".to_string());
|
||||
}
|
||||
|
||||
if perf.avg_response_time_ms > 1000.0 {
|
||||
recommendations.push("Slow response times detected. Consider caching or query optimization.".to_string());
|
||||
}
|
||||
|
||||
recommendations
|
||||
}
|
||||
}
|
||||
|
||||
/// Debug statistics
///
/// Counts of the manager's currently stored debug data.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DebugStats {
    pub total_logs: usize,
    pub logs_by_level: HashMap<String, usize>,
    pub total_metrics: usize,
    pub total_snapshots: usize,
    pub total_requests: usize,
    pub total_test_results: usize,
    pub total_benchmarks: usize,
}
|
||||
|
||||
// Re-export num_cpus if needed
|
||||
extern crate num_cpus;
|
||||
428
tauri/src-tauri/src/fs_api.rs
Normal file
428
tauri/src-tauri/src/fs_api.rs
Normal file
|
|
@ -0,0 +1,428 @@
|
|||
use axum::{
|
||||
extract::{Path, Query, State as AxumState},
|
||||
http::{StatusCode, header},
|
||||
response::{IntoResponse, Response},
|
||||
Json,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use tokio::fs;
|
||||
use tokio::io::AsyncReadExt;
|
||||
|
||||
// Query parameters for path-based file operations (?path=...).
#[derive(Debug, Deserialize)]
pub struct FileQuery {
    pub path: String,
}
|
||||
|
||||
// Response body for `get_file_info`.
#[derive(Debug, Serialize)]
pub struct FileMetadata {
    pub name: String,
    pub path: String,
    pub size: u64,
    pub is_dir: bool,
    pub is_file: bool,
    pub is_symlink: bool,
    pub readonly: bool,
    // Dotfile convention: name starts with '.'.
    pub hidden: bool,
    // RFC 3339 timestamps; None when the platform can't report them.
    pub created: Option<String>,
    pub modified: Option<String>,
    pub accessed: Option<String>,
    // Octal unix mode string, e.g. "644".
    // NOTE(review): this field only exists under cfg(unix), yet get_file_info
    // has a `#[cfg(not(unix))] permissions: None` initializer arm — that will
    // not compile on non-unix targets. Consider making the field an
    // unconditional Option<String>; confirm against the Windows build.
    #[cfg(unix)]
    pub permissions: Option<String>,
    // Best-effort MIME type guessed from the extension; None for directories.
    pub mime_type: Option<String>,
}
|
||||
|
||||
// JSON body for `move_file` (rename/move source -> destination).
#[derive(Debug, Deserialize)]
pub struct MoveRequest {
    pub from: String,
    pub to: String,
}
|
||||
|
||||
// JSON body for `copy_file`.
#[derive(Debug, Deserialize)]
pub struct CopyRequest {
    pub from: String,
    pub to: String,
    pub overwrite: Option<bool>,
}
|
||||
|
||||
// JSON body for `write_file`.
#[derive(Debug, Deserialize)]
pub struct WriteFileRequest {
    pub path: String,
    // Raw text by default; base64-decoded when encoding == "base64".
    pub content: String,
    pub encoding: Option<String>,
}
|
||||
|
||||
// Generic success/message response for mutating file operations.
#[derive(Debug, Serialize)]
pub struct OperationResult {
    pub success: bool,
    pub message: String,
}
|
||||
|
||||
/// Expand tilde to home directory
|
||||
fn expand_path(path: &str) -> Result<PathBuf, StatusCode> {
|
||||
if path.starts_with('~') {
|
||||
let home = dirs::home_dir()
|
||||
.ok_or(StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
Ok(home.join(path.strip_prefix("~/").unwrap_or("")))
|
||||
} else {
|
||||
Ok(PathBuf::from(path))
|
||||
}
|
||||
}
|
||||
|
||||
/// Get detailed file metadata
///
/// Handler for a file-info endpoint: stats the path from the `path` query
/// parameter and returns a [`FileMetadata`]. 404 when the path does not
/// exist or cannot be statted.
pub async fn get_file_info(
    Query(params): Query<FileQuery>,
) -> Result<Json<FileMetadata>, StatusCode> {
    let path = expand_path(&params.path)?;

    // fs::metadata follows symlinks; any stat failure is reported as 404.
    let metadata = fs::metadata(&path).await
        .map_err(|_| StatusCode::NOT_FOUND)?;

    // Fall back to the full path string for paths without a final component
    // (e.g. "/").
    let name = path.file_name()
        .map(|n| n.to_string_lossy().to_string())
        .unwrap_or_else(|| path.to_string_lossy().to_string());

    // symlink_metadata does NOT follow links, so this detects the link itself.
    let is_symlink = fs::symlink_metadata(&path).await
        .map(|m| m.file_type().is_symlink())
        .unwrap_or(false);

    // Dotfile convention for "hidden".
    let hidden = name.starts_with('.');

    // Timestamps as RFC 3339; each is None where the platform/filesystem
    // doesn't provide it.
    let created = metadata.created()
        .map(|t| {
            let datetime: chrono::DateTime<chrono::Utc> = t.into();
            datetime.to_rfc3339()
        })
        .ok();

    let modified = metadata.modified()
        .map(|t| {
            let datetime: chrono::DateTime<chrono::Utc> = t.into();
            datetime.to_rfc3339()
        })
        .ok();

    let accessed = metadata.accessed()
        .map(|t| {
            let datetime: chrono::DateTime<chrono::Utc> = t.into();
            datetime.to_rfc3339()
        })
        .ok();

    // Octal permission bits (lower 9 bits only), unix-only.
    #[cfg(unix)]
    let permissions = {
        use std::os::unix::fs::PermissionsExt;
        Some(format!("{:o}", metadata.permissions().mode() & 0o777))
    };

    let mime_type = if metadata.is_file() {
        // Simple MIME type detection based on extension
        let ext = path.extension()
            .and_then(|e| e.to_str())
            .unwrap_or("");

        Some(match ext {
            "txt" => "text/plain",
            "html" | "htm" => "text/html",
            "css" => "text/css",
            "js" => "application/javascript",
            "json" => "application/json",
            "png" => "image/png",
            "jpg" | "jpeg" => "image/jpeg",
            "gif" => "image/gif",
            "pdf" => "application/pdf",
            "zip" => "application/zip",
            _ => "application/octet-stream",
        }.to_string())
    } else {
        None
    };

    Ok(Json(FileMetadata {
        name,
        path: path.to_string_lossy().to_string(),
        size: metadata.len(),
        is_dir: metadata.is_dir(),
        is_file: metadata.is_file(),
        is_symlink,
        readonly: metadata.permissions().readonly(),
        hidden,
        created,
        modified,
        accessed,
        #[cfg(unix)]
        permissions,
        // NOTE(review): FileMetadata declares `permissions` only under
        // cfg(unix), so this not(unix) arm names a nonexistent field and will
        // fail to compile on Windows — confirm and make the field
        // unconditional.
        #[cfg(not(unix))]
        permissions: None,
        mime_type,
    }))
}
|
||||
|
||||
/// Read file contents
|
||||
pub async fn read_file(
|
||||
Query(params): Query<FileQuery>,
|
||||
) -> Result<Response, StatusCode> {
|
||||
let path = expand_path(¶ms.path)?;
|
||||
|
||||
// Check if file exists and is a file
|
||||
let metadata = fs::metadata(&path).await
|
||||
.map_err(|_| StatusCode::NOT_FOUND)?;
|
||||
|
||||
if !metadata.is_file() {
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
|
||||
// Read file contents
|
||||
let mut file = fs::File::open(&path).await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
let mut contents = Vec::new();
|
||||
file.read_to_end(&mut contents).await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
// Determine content type
|
||||
let content_type = path.extension()
|
||||
.and_then(|e| e.to_str())
|
||||
.and_then(|ext| match ext {
|
||||
"txt" => Some("text/plain"),
|
||||
"html" | "htm" => Some("text/html"),
|
||||
"css" => Some("text/css"),
|
||||
"js" => Some("application/javascript"),
|
||||
"json" => Some("application/json"),
|
||||
"png" => Some("image/png"),
|
||||
"jpg" | "jpeg" => Some("image/jpeg"),
|
||||
"gif" => Some("image/gif"),
|
||||
"pdf" => Some("application/pdf"),
|
||||
_ => None,
|
||||
})
|
||||
.unwrap_or("application/octet-stream");
|
||||
|
||||
Ok((
|
||||
[(header::CONTENT_TYPE, content_type)],
|
||||
contents,
|
||||
).into_response())
|
||||
}
|
||||
|
||||
/// Write file contents
|
||||
pub async fn write_file(
|
||||
Json(req): Json<WriteFileRequest>,
|
||||
) -> Result<Json<OperationResult>, StatusCode> {
|
||||
let path = expand_path(&req.path)?;
|
||||
|
||||
// Ensure parent directory exists
|
||||
if let Some(parent) = path.parent() {
|
||||
fs::create_dir_all(parent).await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
}
|
||||
|
||||
// Write file
|
||||
let content = if req.encoding.as_deref() == Some("base64") {
|
||||
base64::decode(&req.content)
|
||||
.map_err(|_| StatusCode::BAD_REQUEST)?
|
||||
} else {
|
||||
req.content.into_bytes()
|
||||
};
|
||||
|
||||
fs::write(&path, content).await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
Ok(Json(OperationResult {
|
||||
success: true,
|
||||
message: format!("File written successfully: {}", path.display()),
|
||||
}))
|
||||
}
|
||||
|
||||
/// Delete file or directory
|
||||
pub async fn delete_file(
|
||||
Query(params): Query<FileQuery>,
|
||||
) -> Result<Json<OperationResult>, StatusCode> {
|
||||
let path = expand_path(¶ms.path)?;
|
||||
|
||||
// Check if path exists
|
||||
let metadata = fs::metadata(&path).await
|
||||
.map_err(|_| StatusCode::NOT_FOUND)?;
|
||||
|
||||
// Delete based on type
|
||||
if metadata.is_dir() {
|
||||
fs::remove_dir_all(&path).await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
} else {
|
||||
fs::remove_file(&path).await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
}
|
||||
|
||||
Ok(Json(OperationResult {
|
||||
success: true,
|
||||
message: format!("Deleted: {}", path.display()),
|
||||
}))
|
||||
}
|
||||
|
||||
/// Move/rename file or directory
|
||||
pub async fn move_file(
|
||||
Json(req): Json<MoveRequest>,
|
||||
) -> Result<Json<OperationResult>, StatusCode> {
|
||||
let from_path = expand_path(&req.from)?;
|
||||
let to_path = expand_path(&req.to)?;
|
||||
|
||||
// Check if source exists
|
||||
if !from_path.exists() {
|
||||
return Err(StatusCode::NOT_FOUND);
|
||||
}
|
||||
|
||||
// Check if destination already exists
|
||||
if to_path.exists() {
|
||||
return Err(StatusCode::CONFLICT);
|
||||
}
|
||||
|
||||
// Ensure destination parent directory exists
|
||||
if let Some(parent) = to_path.parent() {
|
||||
fs::create_dir_all(parent).await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
}
|
||||
|
||||
// Move the file/directory
|
||||
fs::rename(&from_path, &to_path).await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
Ok(Json(OperationResult {
|
||||
success: true,
|
||||
message: format!("Moved from {} to {}", from_path.display(), to_path.display()),
|
||||
}))
|
||||
}
|
||||
|
||||
/// Copy file or directory
|
||||
pub async fn copy_file(
|
||||
Json(req): Json<CopyRequest>,
|
||||
) -> Result<Json<OperationResult>, StatusCode> {
|
||||
let from_path = expand_path(&req.from)?;
|
||||
let to_path = expand_path(&req.to)?;
|
||||
|
||||
// Check if source exists
|
||||
let metadata = fs::metadata(&from_path).await
|
||||
.map_err(|_| StatusCode::NOT_FOUND)?;
|
||||
|
||||
// Check if destination already exists
|
||||
if to_path.exists() && !req.overwrite.unwrap_or(false) {
|
||||
return Err(StatusCode::CONFLICT);
|
||||
}
|
||||
|
||||
// Ensure destination parent directory exists
|
||||
if let Some(parent) = to_path.parent() {
|
||||
fs::create_dir_all(parent).await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
}
|
||||
|
||||
// Copy based on type
|
||||
if metadata.is_file() {
|
||||
fs::copy(&from_path, &to_path).await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
} else if metadata.is_dir() {
|
||||
// Recursive directory copy
|
||||
copy_dir_recursive(&from_path, &to_path).await?;
|
||||
}
|
||||
|
||||
Ok(Json(OperationResult {
|
||||
success: true,
|
||||
message: format!("Copied from {} to {}", from_path.display(), to_path.display()),
|
||||
}))
|
||||
}
|
||||
|
||||
/// Recursively copy a directory
|
||||
async fn copy_dir_recursive(from: &PathBuf, to: &PathBuf) -> Result<(), StatusCode> {
|
||||
fs::create_dir_all(to).await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
let mut entries = fs::read_dir(from).await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
while let Some(entry) = entries.next_entry().await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)? {
|
||||
|
||||
let from_path = entry.path();
|
||||
let to_path = to.join(entry.file_name());
|
||||
|
||||
let metadata = entry.metadata().await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
if metadata.is_file() {
|
||||
fs::copy(&from_path, &to_path).await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
} else if metadata.is_dir() {
|
||||
Box::pin(copy_dir_recursive(&from_path, &to_path)).await?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Search for files matching a pattern
///
/// Query parameters accepted by the `search_files` endpoint.
#[derive(Debug, Deserialize)]
pub struct SearchQuery {
    /// Directory to search under; tilde-expanded by `expand_path`.
    pub path: String,
    /// Substring matched case-insensitively against each entry's name.
    pub pattern: String,
    /// Maximum recursion depth; defaults to 5 when omitted.
    pub max_depth: Option<u32>,
}
|
||||
|
||||
/// One match produced by the file search, serialized back to the client.
#[derive(Debug, Serialize)]
pub struct SearchResult {
    /// Full (lossily stringified) path of the matching entry.
    pub path: String,
    /// The entry's file name.
    pub name: String,
    /// Whether the entry is a directory.
    pub is_dir: bool,
    /// Size in bytes as reported by the entry's metadata.
    pub size: u64,
}
|
||||
|
||||
pub async fn search_files(
|
||||
Query(params): Query<SearchQuery>,
|
||||
) -> Result<Json<Vec<SearchResult>>, StatusCode> {
|
||||
let base_path = expand_path(¶ms.path)?;
|
||||
let pattern = params.pattern.to_lowercase();
|
||||
let max_depth = params.max_depth.unwrap_or(5);
|
||||
|
||||
let mut results = Vec::new();
|
||||
search_recursive(&base_path, &pattern, 0, max_depth, &mut results).await?;
|
||||
|
||||
Ok(Json(results))
|
||||
}
|
||||
|
||||
async fn search_recursive(
|
||||
path: &PathBuf,
|
||||
pattern: &str,
|
||||
depth: u32,
|
||||
max_depth: u32,
|
||||
results: &mut Vec<SearchResult>,
|
||||
) -> Result<(), StatusCode> {
|
||||
if depth > max_depth {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut entries = fs::read_dir(path).await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
while let Some(entry) = entries.next_entry().await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)? {
|
||||
|
||||
let entry_path = entry.path();
|
||||
let file_name = entry.file_name().to_string_lossy().to_string();
|
||||
|
||||
if file_name.to_lowercase().contains(pattern) {
|
||||
let metadata = entry.metadata().await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
results.push(SearchResult {
|
||||
path: entry_path.to_string_lossy().to_string(),
|
||||
name: file_name,
|
||||
is_dir: metadata.is_dir(),
|
||||
size: metadata.len(),
|
||||
});
|
||||
}
|
||||
|
||||
// Recurse into directories
|
||||
if entry.file_type().await
|
||||
.map(|t| t.is_dir())
|
||||
.unwrap_or(false) {
|
||||
Box::pin(search_recursive(&entry_path, pattern, depth + 1, max_depth, results)).await?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -9,6 +9,20 @@ pub mod auth;
|
|||
pub mod terminal_detector;
|
||||
pub mod cli_installer;
|
||||
pub mod tray_menu;
|
||||
pub mod cast;
|
||||
pub mod tty_forward;
|
||||
pub mod session_monitor;
|
||||
pub mod port_conflict;
|
||||
pub mod network_utils;
|
||||
pub mod notification_manager;
|
||||
pub mod welcome;
|
||||
pub mod permissions;
|
||||
pub mod updater;
|
||||
pub mod backend_manager;
|
||||
pub mod debug_features;
|
||||
pub mod api_testing;
|
||||
pub mod auth_cache;
|
||||
pub mod terminal_integrations;
|
||||
|
||||
#[cfg(mobile)]
|
||||
pub fn init() {
|
||||
|
|
|
|||
|
|
@ -19,6 +19,23 @@ mod terminal_detector;
|
|||
mod cli_installer;
|
||||
mod auth;
|
||||
mod tray_menu;
|
||||
mod cast;
|
||||
mod tty_forward;
|
||||
mod session_monitor;
|
||||
mod port_conflict;
|
||||
mod network_utils;
|
||||
mod notification_manager;
|
||||
mod welcome;
|
||||
mod permissions;
|
||||
mod updater;
|
||||
mod backend_manager;
|
||||
mod debug_features;
|
||||
mod api_testing;
|
||||
mod auth_cache;
|
||||
mod terminal_integrations;
|
||||
mod app_mover;
|
||||
mod terminal_spawn_service;
|
||||
mod fs_api;
|
||||
|
||||
use commands::*;
|
||||
use state::AppState;
|
||||
|
|
@ -40,8 +57,8 @@ fn open_settings_window(app: AppHandle) -> Result<(), String> {
|
|||
)
|
||||
.title("VibeTunnel Settings")
|
||||
.inner_size(800.0, 600.0)
|
||||
.resizable(false)
|
||||
.decorations(false)
|
||||
.resizable(true)
|
||||
.decorations(true)
|
||||
.center()
|
||||
.build()
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
|
@ -108,8 +125,197 @@ fn main() {
|
|||
cli_installer::install_cli,
|
||||
cli_installer::uninstall_cli,
|
||||
cli_installer::check_cli_installed,
|
||||
start_terminal_recording,
|
||||
stop_terminal_recording,
|
||||
save_terminal_recording,
|
||||
get_recording_status,
|
||||
start_tty_forward,
|
||||
stop_tty_forward,
|
||||
list_tty_forwards,
|
||||
get_tty_forward,
|
||||
get_session_stats,
|
||||
get_monitored_sessions,
|
||||
start_session_monitoring,
|
||||
check_port_availability,
|
||||
detect_port_conflict,
|
||||
resolve_port_conflict,
|
||||
force_kill_process,
|
||||
find_available_ports,
|
||||
get_local_ip_address,
|
||||
get_all_ip_addresses,
|
||||
get_network_interfaces,
|
||||
get_hostname,
|
||||
test_network_connectivity,
|
||||
get_network_stats,
|
||||
show_notification,
|
||||
get_notifications,
|
||||
get_notification_history,
|
||||
mark_notification_as_read,
|
||||
mark_all_notifications_as_read,
|
||||
clear_notification,
|
||||
clear_all_notifications,
|
||||
get_unread_notification_count,
|
||||
update_notification_settings,
|
||||
get_notification_settings,
|
||||
get_welcome_state,
|
||||
should_show_welcome,
|
||||
get_tutorials,
|
||||
get_tutorial_category,
|
||||
complete_tutorial_step,
|
||||
skip_tutorial,
|
||||
reset_tutorial,
|
||||
get_tutorial_progress,
|
||||
show_welcome_window,
|
||||
get_recording_settings,
|
||||
save_recording_settings,
|
||||
get_all_advanced_settings,
|
||||
update_advanced_settings,
|
||||
reset_settings_section,
|
||||
export_settings,
|
||||
import_settings,
|
||||
check_all_permissions,
|
||||
check_permission,
|
||||
request_permission,
|
||||
get_permission_info,
|
||||
get_all_permissions,
|
||||
get_required_permissions,
|
||||
get_missing_required_permissions,
|
||||
all_required_permissions_granted,
|
||||
open_system_permission_settings,
|
||||
get_permission_stats,
|
||||
check_for_updates,
|
||||
download_update,
|
||||
install_update,
|
||||
cancel_update,
|
||||
get_update_state,
|
||||
get_updater_settings,
|
||||
update_updater_settings,
|
||||
switch_update_channel,
|
||||
get_update_history,
|
||||
get_available_backends,
|
||||
get_backend_config,
|
||||
is_backend_installed,
|
||||
install_backend,
|
||||
start_backend,
|
||||
stop_backend,
|
||||
switch_backend,
|
||||
get_active_backend,
|
||||
get_backend_instances,
|
||||
check_backend_health,
|
||||
get_backend_stats,
|
||||
get_debug_settings,
|
||||
update_debug_settings,
|
||||
log_debug_message,
|
||||
record_performance_metric,
|
||||
take_memory_snapshot,
|
||||
get_debug_logs,
|
||||
get_performance_metrics,
|
||||
get_memory_snapshots,
|
||||
get_network_requests,
|
||||
run_api_tests,
|
||||
run_benchmarks,
|
||||
generate_diagnostic_report,
|
||||
clear_debug_data,
|
||||
set_debug_mode,
|
||||
get_debug_stats,
|
||||
get_api_test_config,
|
||||
update_api_test_config,
|
||||
add_api_test_suite,
|
||||
get_api_test_suite,
|
||||
list_api_test_suites,
|
||||
run_single_api_test,
|
||||
run_api_test_suite,
|
||||
get_api_test_history,
|
||||
clear_api_test_history,
|
||||
import_postman_collection,
|
||||
export_api_test_suite,
|
||||
get_auth_cache_config,
|
||||
update_auth_cache_config,
|
||||
store_auth_token,
|
||||
get_auth_token,
|
||||
store_auth_credential,
|
||||
get_auth_credential,
|
||||
clear_auth_cache_entry,
|
||||
clear_all_auth_cache,
|
||||
get_auth_cache_stats,
|
||||
list_auth_cache_entries,
|
||||
export_auth_cache,
|
||||
import_auth_cache,
|
||||
hash_password,
|
||||
create_auth_cache_key,
|
||||
detect_installed_terminals,
|
||||
get_default_terminal,
|
||||
set_default_terminal,
|
||||
launch_terminal_emulator,
|
||||
get_terminal_config,
|
||||
update_terminal_config,
|
||||
list_detected_terminals,
|
||||
create_terminal_ssh_url,
|
||||
get_terminal_integration_stats,
|
||||
// Settings UI Commands
|
||||
get_all_settings,
|
||||
update_setting,
|
||||
set_dashboard_password,
|
||||
restart_server_with_port,
|
||||
update_server_bind_address,
|
||||
set_dock_icon_visibility,
|
||||
set_log_level,
|
||||
test_api_endpoint,
|
||||
get_server_logs,
|
||||
export_logs,
|
||||
get_local_ip,
|
||||
detect_terminals,
|
||||
// App Mover Commands
|
||||
app_mover::prompt_move_to_applications,
|
||||
app_mover::is_in_applications_folder,
|
||||
// Terminal Spawn Service Commands
|
||||
terminal_spawn_service::spawn_terminal_for_session,
|
||||
terminal_spawn_service::spawn_terminal_with_command,
|
||||
terminal_spawn_service::spawn_custom_terminal,
|
||||
])
|
||||
.setup(|app| {
|
||||
// Set app handle in managers
|
||||
let state = app.state::<AppState>();
|
||||
let notification_manager = state.notification_manager.clone();
|
||||
let welcome_manager = state.welcome_manager.clone();
|
||||
let permissions_manager = state.permissions_manager.clone();
|
||||
let update_manager = state.update_manager.clone();
|
||||
let app_handle = app.handle().clone();
|
||||
let app_handle2 = app.handle().clone();
|
||||
let app_handle3 = app.handle().clone();
|
||||
let app_handle4 = app.handle().clone();
|
||||
let app_handle_for_move = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
notification_manager.set_app_handle(app_handle).await;
|
||||
welcome_manager.set_app_handle(app_handle2).await;
|
||||
permissions_manager.set_app_handle(app_handle3).await;
|
||||
update_manager.set_app_handle(app_handle4).await;
|
||||
|
||||
// Load welcome state and check if should show welcome
|
||||
let _ = welcome_manager.load_state().await;
|
||||
if welcome_manager.should_show_welcome().await {
|
||||
let _ = welcome_manager.show_welcome_window().await;
|
||||
}
|
||||
|
||||
// Check permissions on startup
|
||||
let _ = permissions_manager.check_all_permissions().await;
|
||||
|
||||
// Check if app should be moved to Applications folder (macOS only)
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let app_handle_move = app_handle_for_move.clone();
|
||||
tokio::spawn(async move {
|
||||
// Small delay to let the app fully initialize
|
||||
tokio::time::sleep(tokio::time::Duration::from_secs(2)).await;
|
||||
let _ = app_mover::check_and_prompt_move(app_handle_move).await;
|
||||
});
|
||||
}
|
||||
|
||||
// Load updater settings and start auto-check
|
||||
let _ = update_manager.load_settings().await;
|
||||
update_manager.clone().start_auto_check().await;
|
||||
});
|
||||
|
||||
// Create system tray icon using menu-bar-icon.png with template mode
|
||||
let icon_path = app.path().resource_dir().unwrap().join("icons/menu-bar-icon.png");
|
||||
let tray_icon = if let Ok(icon_data) = std::fs::read(&icon_path) {
|
||||
|
|
@ -370,9 +576,16 @@ async fn start_server_with_monitoring(app_handle: AppHandle) {
|
|||
|
||||
// Update tray menu with server status
|
||||
update_tray_menu_status(&app_handle, status.port, 0);
|
||||
|
||||
// Show notification
|
||||
let _ = state.notification_manager.notify_server_status(true, status.port).await;
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::error!("Failed to start server: {}", e);
|
||||
let _ = state.notification_manager.notify_error(
|
||||
"Server Start Failed",
|
||||
&format!("Failed to start server: {}", e)
|
||||
).await;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -416,11 +629,18 @@ async fn start_server_with_monitoring(app_handle: AppHandle) {
|
|||
|
||||
// Notify frontend of server restart
|
||||
if let Some(window) = monitoring_app.get_webview_window("main") {
|
||||
let _ = window.emit("server:restarted", status);
|
||||
let _ = window.emit("server:restarted", &status);
|
||||
}
|
||||
|
||||
// Show notification
|
||||
let _ = monitoring_state.notification_manager.notify_server_status(true, status.port).await;
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::error!("Failed to restart server: {}", e);
|
||||
let _ = monitoring_state.notification_manager.notify_error(
|
||||
"Server Restart Failed",
|
||||
&format!("Failed to restart server: {}", e)
|
||||
).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -436,11 +656,18 @@ async fn start_server_with_monitoring(app_handle: AppHandle) {
|
|||
|
||||
// Notify frontend of server restart
|
||||
if let Some(window) = monitoring_app.get_webview_window("main") {
|
||||
let _ = window.emit("server:restarted", status);
|
||||
let _ = window.emit("server:restarted", &status);
|
||||
}
|
||||
|
||||
// Show notification
|
||||
let _ = monitoring_state.notification_manager.notify_server_status(true, status.port).await;
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::error!("Failed to start server: {}", e);
|
||||
let _ = monitoring_state.notification_manager.notify_error(
|
||||
"Server Start Failed",
|
||||
&format!("Failed to start server: {}", e)
|
||||
).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -501,9 +728,9 @@ async fn start_server_internal(state: &AppState) -> Result<ServerStatus, String>
|
|||
// Start HTTP server with auth if configured
|
||||
let mut http_server = if settings.dashboard.enable_password && !settings.dashboard.password.is_empty() {
|
||||
let auth_config = crate::auth::AuthConfig::new(true, Some(settings.dashboard.password));
|
||||
HttpServer::with_auth(state.terminal_manager.clone(), auth_config)
|
||||
HttpServer::with_auth(state.terminal_manager.clone(), state.session_monitor.clone(), auth_config)
|
||||
} else {
|
||||
HttpServer::new(state.terminal_manager.clone())
|
||||
HttpServer::new(state.terminal_manager.clone(), state.session_monitor.clone())
|
||||
};
|
||||
|
||||
// Start server with appropriate access mode
|
||||
|
|
|
|||
289
tauri/src-tauri/src/network_utils.rs
Normal file
289
tauri/src-tauri/src/network_utils.rs
Normal file
|
|
@ -0,0 +1,289 @@
|
|||
use serde::{Serialize, Deserialize};
|
||||
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
|
||||
use tracing::error;
|
||||
|
||||
/// Network interface information
///
/// One OS-reported interface together with every address bound to it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NetworkInterface {
    /// Interface name (e.g. "en0" on Unix, adapter friendly name on Windows).
    pub name: String,
    /// All IPv4/IPv6 addresses bound to this interface.
    pub addresses: Vec<IpAddress>,
    /// Whether the OS reports the interface as up / operational.
    pub is_up: bool,
    /// Whether this is a loopback interface.
    pub is_loopback: bool,
}
|
||||
|
||||
/// IP address with type information
///
/// A single address in string form, annotated with its family and whether
/// it falls in a private/link-local range (see `NetworkUtils::is_private_ip`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IpAddress {
    /// Textual form of the address (e.g. "192.168.1.10").
    pub address: String,
    /// True when the address is IPv4.
    pub is_ipv4: bool,
    /// True when the address is IPv6.
    pub is_ipv6: bool,
    /// True for RFC 1918 IPv4 ranges, or IPv6 link-local / unique-local.
    pub is_private: bool,
}
|
||||
|
||||
/// Network utilities
///
/// Stateless namespace for host network introspection; all functionality
/// lives in associated functions on the `impl` block below.
pub struct NetworkUtils;
|
||||
|
||||
impl NetworkUtils {
    /// Get the primary local IP address.
    ///
    /// Preference order: first a private (RFC 1918) IPv4 on an up,
    /// non-loopback interface; failing that, any IPv4 on an up,
    /// non-loopback interface; otherwise `None`.
    pub fn get_local_ip_address() -> Option<String> {
        // Enumerate interfaces once, then scan twice with different criteria.
        let interfaces = Self::get_all_interfaces();

        // Pass 1: prefer a private network address (192.168.x.x, 10.x.x.x, 172.16-31.x.x).
        for interface in &interfaces {
            if interface.is_loopback || !interface.is_up {
                continue;
            }

            for addr in &interface.addresses {
                if addr.is_ipv4 && addr.is_private {
                    return Some(addr.address.clone());
                }
            }
        }

        // Pass 2: if no private address found, return any non-loopback IPv4.
        for interface in &interfaces {
            if interface.is_loopback || !interface.is_up {
                continue;
            }

            for addr in &interface.addresses {
                if addr.is_ipv4 {
                    return Some(addr.address.clone());
                }
            }
        }

        None
    }

    /// Get all IP addresses (IPv4 and IPv6) of every non-loopback interface.
    ///
    /// Note: interfaces that are down are still included — only loopback is
    /// filtered out here.
    pub fn get_all_ip_addresses() -> Vec<String> {
        let interfaces = Self::get_all_interfaces();
        let mut addresses = Vec::new();

        for interface in interfaces {
            if interface.is_loopback {
                continue;
            }

            for addr in interface.addresses {
                addresses.push(addr.address);
            }
        }

        addresses
    }

    /// Get all network interfaces, dispatching to the platform-specific
    /// enumerator; unsupported platforms get an empty list.
    pub fn get_all_interfaces() -> Vec<NetworkInterface> {
        #[cfg(unix)]
        {
            Self::get_interfaces_unix()
        }

        #[cfg(windows)]
        {
            Self::get_interfaces_windows()
        }

        #[cfg(not(any(unix, windows)))]
        {
            Vec::new()
        }
    }

    /// Unix implementation backed by `nix::ifaddrs::getifaddrs`.
    ///
    /// `getifaddrs` yields one record per (interface, address) pair, so the
    /// records are grouped by interface name in a map. On failure the error
    /// is logged and an empty list is returned.
    #[cfg(unix)]
    fn get_interfaces_unix() -> Vec<NetworkInterface> {
        use nix::ifaddrs::getifaddrs;

        let mut interfaces = std::collections::HashMap::new();

        match getifaddrs() {
            Ok(addrs) => {
                for ifaddr in addrs {
                    let name = ifaddr.interface_name.clone();
                    let flags = ifaddr.flags;

                    // up/loopback flags are captured from the first record seen
                    // for each interface name.
                    let interface = interfaces.entry(name.clone()).or_insert_with(|| NetworkInterface {
                        name,
                        addresses: Vec::new(),
                        is_up: flags.contains(nix::net::if_::InterfaceFlags::IFF_UP),
                        is_loopback: flags.contains(nix::net::if_::InterfaceFlags::IFF_LOOPBACK),
                    });

                    // Records without an address (or with a non-IP address
                    // family) are skipped.
                    if let Some(address) = ifaddr.address {
                        if let Some(sockaddr) = address.as_sockaddr_in() {
                            let ip = IpAddr::V4(Ipv4Addr::from(sockaddr.ip()));
                            interface.addresses.push(IpAddress {
                                address: ip.to_string(),
                                is_ipv4: true,
                                is_ipv6: false,
                                is_private: Self::is_private_ip(&ip),
                            });
                        } else if let Some(sockaddr) = address.as_sockaddr_in6() {
                            let ip = IpAddr::V6(sockaddr.ip());
                            interface.addresses.push(IpAddress {
                                address: ip.to_string(),
                                is_ipv4: false,
                                is_ipv6: true,
                                is_private: Self::is_private_ip(&ip),
                            });
                        }
                    }
                }
            }
            Err(e) => {
                error!("Failed to get network interfaces: {}", e);
            }
        }

        interfaces.into_values().collect()
    }

    /// Windows implementation backed by the `ipconfig` crate.
    ///
    /// On failure the error is logged and an empty list is returned.
    #[cfg(windows)]
    fn get_interfaces_windows() -> Vec<NetworkInterface> {
        use ipconfig::get_adapters;

        let mut interfaces = Vec::new();

        match get_adapters() {
            Ok(adapters) => {
                for adapter in adapters {
                    let mut addresses = Vec::new();

                    // Collect IPv4 addresses.
                    for addr in adapter.ipv4_addresses() {
                        addresses.push(IpAddress {
                            address: addr.to_string(),
                            is_ipv4: true,
                            is_ipv6: false,
                            is_private: Self::is_private_ipv4(addr),
                        });
                    }

                    // Collect IPv6 addresses.
                    for addr in adapter.ipv6_addresses() {
                        addresses.push(IpAddress {
                            address: addr.to_string(),
                            is_ipv4: false,
                            is_ipv6: true,
                            is_private: Self::is_private_ipv6(addr),
                        });
                    }

                    interfaces.push(NetworkInterface {
                        name: adapter.friendly_name().to_string(),
                        addresses,
                        is_up: adapter.oper_status() == ipconfig::OperStatus::IfOperStatusUp,
                        is_loopback: adapter.if_type() == ipconfig::IfType::SoftwareLoopback,
                    });
                }
            }
            Err(e) => {
                error!("Failed to get network interfaces: {}", e);
            }
        }

        interfaces
    }

    /// Check if an IP address is private, dispatching on the address family.
    fn is_private_ip(ip: &IpAddr) -> bool {
        match ip {
            IpAddr::V4(ipv4) => Self::is_private_ipv4(ipv4),
            IpAddr::V6(ipv6) => Self::is_private_ipv6(ipv6),
        }
    }

    /// Check if an IPv4 address is in one of the RFC 1918 private ranges.
    fn is_private_ipv4(ip: &Ipv4Addr) -> bool {
        let octets = ip.octets();

        // 10.0.0.0/8
        if octets[0] == 10 {
            return true;
        }

        // 172.16.0.0/12
        if octets[0] == 172 && (octets[1] >= 16 && octets[1] <= 31) {
            return true;
        }

        // 192.168.0.0/16
        if octets[0] == 192 && octets[1] == 168 {
            return true;
        }

        false
    }

    /// Check if an IPv6 address is link-local (fe80::/10) or unique-local
    /// (fc00::/7) — the two ranges treated as "private" here.
    fn is_private_ipv6(ip: &Ipv6Addr) -> bool {
        // Check for link-local addresses (fe80::/10).
        let segments = ip.segments();
        if segments[0] & 0xffc0 == 0xfe80 {
            return true;
        }

        // Check for unique local addresses (fc00::/7).
        if segments[0] & 0xfe00 == 0xfc00 {
            return true;
        }

        false
    }

    /// Get the host name, or `None` when it is unavailable or not valid
    /// Unicode.
    pub fn get_hostname() -> Option<String> {
        hostname::get()
            .ok()
            .and_then(|name| name.into_string().ok())
    }

    /// Test network connectivity to `host:port` with a TCP connect attempt,
    /// bounded by a 3-second timeout. Returns `true` only when the
    /// connection succeeds within the timeout.
    pub async fn test_connectivity(host: &str, port: u16) -> bool {
        use tokio::net::TcpStream;
        use tokio::time::timeout;
        use std::time::Duration;

        let addr = format!("{}:{}", host, port);
        match timeout(Duration::from_secs(3), TcpStream::connect(&addr)).await {
            Ok(Ok(_)) => true,
            _ => false,
        }
    }

    /// Get a snapshot of host network statistics.
    ///
    /// NOTE(review): this re-enumerates the interfaces three times (primary
    /// IP, all IPs, interface count) — acceptable for an occasional status
    /// call, but worth caching if it ends up on a hot path.
    pub fn get_network_stats() -> NetworkStats {
        NetworkStats {
            hostname: Self::get_hostname(),
            primary_ip: Self::get_local_ip_address(),
            all_ips: Self::get_all_ip_addresses(),
            interface_count: Self::get_all_interfaces().len(),
        }
    }
}
|
||||
|
||||
/// Network statistics
///
/// Snapshot of host networking information produced by
/// `NetworkUtils::get_network_stats`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NetworkStats {
    /// Host name, if it could be determined.
    pub hostname: Option<String>,
    /// Primary address (private IPv4 preferred), if any.
    pub primary_ip: Option<String>,
    /// Every address of every non-loopback interface.
    pub all_ips: Vec<String>,
    /// Number of interfaces reported by the OS.
    pub interface_count: usize,
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// All three RFC 1918 ranges classify as private; a public address
    /// does not.
    #[test]
    fn test_private_ipv4() {
        assert!(NetworkUtils::is_private_ipv4(&"10.0.0.1".parse().unwrap()));
        assert!(NetworkUtils::is_private_ipv4(&"172.16.0.1".parse().unwrap()));
        assert!(NetworkUtils::is_private_ipv4(&"192.168.1.1".parse().unwrap()));
        assert!(!NetworkUtils::is_private_ipv4(&"8.8.8.8".parse().unwrap()));
    }
}
|
||||
383
tauri/src-tauri/src/notification_manager.rs
Normal file
383
tauri/src-tauri/src/notification_manager.rs
Normal file
|
|
@ -0,0 +1,383 @@
|
|||
use serde::{Serialize, Deserialize};
|
||||
use tauri::{AppHandle, Emitter};
|
||||
use tauri_plugin_notification::NotificationExt;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
use std::collections::HashMap;
|
||||
use chrono::{DateTime, Utc};
|
||||
|
||||
/// Notification type enumeration
///
/// Categorises a notification so it can be filtered per-type via
/// `NotificationSettings::enabled_types` and mapped to an icon when shown
/// as a system notification.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub enum NotificationType {
    /// General informational message.
    Info,
    /// An operation completed successfully.
    Success,
    /// Something needs attention but is not an error.
    Warning,
    /// An operation failed.
    Error,
    /// Server started/stopped/restarted events.
    ServerStatus,
    /// A new app update is available.
    UpdateAvailable,
    /// A system permission must be granted.
    PermissionRequired,
    /// Terminal session lifecycle events (disabled by default in settings).
    SessionEvent,
}
|
||||
|
||||
/// Notification priority levels
///
/// Ordered from least to most urgent; the derived `Ord` follows the
/// declaration order (`Low < Normal < High < Critical`).
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub enum NotificationPriority {
    Low,
    Normal,
    High,
    Critical,
}
|
||||
|
||||
/// Notification structure
///
/// A single in-app notification as stored by `NotificationManager` and
/// emitted to the frontend on the `notification:new` event.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Notification {
    /// Unique identifier (UUID v4 string).
    pub id: String,
    /// Category used for filtering and icon selection.
    pub notification_type: NotificationType,
    /// Urgency level.
    pub priority: NotificationPriority,
    /// Short headline shown to the user.
    pub title: String,
    /// Longer message body.
    pub body: String,
    /// Creation time (UTC).
    pub timestamp: DateTime<Utc>,
    /// Whether the user has marked this notification as read.
    pub read: bool,
    /// Optional action buttons attached to the notification.
    pub actions: Vec<NotificationAction>,
    /// Free-form extra data carried alongside the notification.
    pub metadata: HashMap<String, serde_json::Value>,
}
|
||||
|
||||
/// Notification action
///
/// A single actionable button on a notification. The meaning of
/// `action_type` is interpreted by the frontend — presumably a dispatch
/// key; confirm against the consuming UI code.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NotificationAction {
    /// Identifier reported back when the action is triggered.
    pub id: String,
    /// Button label shown to the user.
    pub label: String,
    /// Action discriminator consumed by the frontend.
    pub action_type: String,
}
|
||||
|
||||
/// Notification settings
///
/// User-configurable behaviour of the notification system; see the
/// `Default` impl for the shipped defaults.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NotificationSettings {
    /// Master switch — when false, no notifications are recorded or shown.
    pub enabled: bool,
    /// Also surface notifications via the OS notification center.
    pub show_in_system: bool,
    /// Play a sound with notifications. NOTE(review): not referenced by the
    /// visible manager code — confirm it is honoured elsewhere.
    pub play_sound: bool,
    /// Per-type enable flags; a type missing from the map is treated as
    /// enabled by `show_notification`.
    pub enabled_types: HashMap<NotificationType, bool>,
}
|
||||
|
||||
impl Default for NotificationSettings {
|
||||
fn default() -> Self {
|
||||
let mut enabled_types = HashMap::new();
|
||||
enabled_types.insert(NotificationType::Info, true);
|
||||
enabled_types.insert(NotificationType::Success, true);
|
||||
enabled_types.insert(NotificationType::Warning, true);
|
||||
enabled_types.insert(NotificationType::Error, true);
|
||||
enabled_types.insert(NotificationType::ServerStatus, true);
|
||||
enabled_types.insert(NotificationType::UpdateAvailable, true);
|
||||
enabled_types.insert(NotificationType::PermissionRequired, true);
|
||||
enabled_types.insert(NotificationType::SessionEvent, false);
|
||||
|
||||
Self {
|
||||
enabled: true,
|
||||
show_in_system: true,
|
||||
play_sound: true,
|
||||
enabled_types,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Notification manager
///
/// Central store and dispatcher for in-app notifications. All fields are
/// `Arc<RwLock<…>>` so the manager can be cloned into async tasks and
/// shared with Tauri command handlers.
pub struct NotificationManager {
    /// Tauri handle, set after startup via `set_app_handle`; `None` until then.
    app_handle: Arc<RwLock<Option<AppHandle>>>,
    /// Live notifications keyed by id.
    /// NOTE(review): unlike the history below, this map has no size cap in
    /// the visible code — confirm it is pruned elsewhere (e.g. clear commands).
    notifications: Arc<RwLock<HashMap<String, Notification>>>,
    /// Current user settings.
    settings: Arc<RwLock<NotificationSettings>>,
    /// Chronological history, trimmed to `max_history_size`.
    notification_history: Arc<RwLock<Vec<Notification>>>,
    /// Cap on `notification_history` length (oldest entries dropped first).
    max_history_size: usize,
}
|
||||
|
||||
impl NotificationManager {
|
||||
    /// Create a new notification manager with default settings, empty
    /// stores, and a history cap of 100 entries. The app handle starts as
    /// `None` and must be supplied later via `set_app_handle`.
    pub fn new() -> Self {
        Self {
            app_handle: Arc::new(RwLock::new(None)),
            notifications: Arc::new(RwLock::new(HashMap::new())),
            settings: Arc::new(RwLock::new(NotificationSettings::default())),
            notification_history: Arc::new(RwLock::new(Vec::new())),
            max_history_size: 100,
        }
    }
|
||||
|
||||
    /// Set the Tauri app handle used for system notifications and frontend
    /// events. Until this is called, system notifications fail with
    /// "App handle not set" and frontend emits are silently skipped.
    pub async fn set_app_handle(&self, app_handle: AppHandle) {
        *self.app_handle.write().await = Some(app_handle);
    }
|
||||
|
||||
    /// Replace the notification settings wholesale with `settings`.
    pub async fn update_settings(&self, settings: NotificationSettings) {
        *self.settings.write().await = settings;
    }
|
||||
|
||||
    /// Return a clone of the current notification settings.
    pub async fn get_settings(&self) -> NotificationSettings {
        self.settings.read().await.clone()
    }
|
||||
|
||||
    /// Create, store and dispatch a notification.
    ///
    /// Flow: settings gate → build the `Notification` → store it in the live
    /// map and (capped) history → optionally show a system notification →
    /// emit `notification:new` to the frontend.
    ///
    /// Returns the new notification's id on success, or one of the sentinel
    /// strings `"notifications_disabled"` / `"notification_type_disabled"`
    /// (wrapped in `Ok`) when suppressed by settings. Errors only when the
    /// frontend emit fails; a system-notification failure is merely logged.
    pub async fn show_notification(
        &self,
        notification_type: NotificationType,
        priority: NotificationPriority,
        title: String,
        body: String,
        actions: Vec<NotificationAction>,
        metadata: HashMap<String, serde_json::Value>,
    ) -> Result<String, String> {
        // Read guard on settings is held for the whole call.
        let settings = self.settings.read().await;

        // Master switch.
        if !settings.enabled {
            return Ok("notifications_disabled".to_string());
        }

        // Per-type switch; a type absent from the map is treated as enabled.
        if let Some(&enabled) = settings.enabled_types.get(&notification_type) {
            if !enabled {
                return Ok("notification_type_disabled".to_string());
            }
        }

        let notification_id = uuid::Uuid::new_v4().to_string();
        let notification = Notification {
            id: notification_id.clone(),
            notification_type,
            priority,
            title: title.clone(),
            body: body.clone(),
            timestamp: Utc::now(),
            read: false,
            actions,
            metadata,
        };

        // Store in the live map (keyed by id).
        self.notifications.write().await.insert(notification_id.clone(), notification.clone());

        // Append to history.
        // NOTE(review): this write guard stays alive through the system
        // notification and frontend emit below — tokio's RwLock permits
        // holding across await, but it serialises concurrent callers.
        let mut history = self.notification_history.write().await;
        history.push(notification.clone());

        // Trim oldest entries once the cap is exceeded.
        if history.len() > self.max_history_size {
            let drain_count = history.len() - self.max_history_size;
            history.drain(0..drain_count);
        }

        // System notification is best-effort: failures are logged, not returned.
        if settings.show_in_system {
            match self.show_system_notification(&title, &body, notification_type).await {
                Ok(_) => {},
                Err(e) => {
                    tracing::error!("Failed to show system notification: {}", e);
                }
            }
        }

        // Tell the frontend; skipped silently when no app handle is set yet.
        if let Some(app_handle) = self.app_handle.read().await.as_ref() {
            app_handle.emit("notification:new", &notification)
                .map_err(|e| format!("Failed to emit notification event: {}", e))?;
        }

        Ok(notification_id)
    }
|
||||
|
||||
    /// Show a system notification using Tauri's notification plugin.
    ///
    /// Requires the app handle to have been set; otherwise returns
    /// `Err("App handle not set")`. The notification type selects an emoji
    /// passed as the notification "icon".
    /// NOTE(review): the plugin's `icon` normally expects an icon
    /// name/path — confirm an emoji string renders as intended on all
    /// platforms.
    async fn show_system_notification(
        &self,
        title: &str,
        body: &str,
        notification_type: NotificationType,
    ) -> Result<(), String> {
        let app_handle_guard = self.app_handle.read().await;
        let app_handle = app_handle_guard.as_ref()
            .ok_or_else(|| "App handle not set".to_string())?;

        let mut builder = app_handle.notification()
            .builder()
            .title(title)
            .body(body);

        // Map the notification category to an emoji; the match is
        // exhaustive, so every variant currently yields Some(_).
        let icon = match notification_type {
            NotificationType::Success => Some("✅"),
            NotificationType::Warning => Some("⚠️"),
            NotificationType::Error => Some("❌"),
            NotificationType::UpdateAvailable => Some("🔄"),
            NotificationType::PermissionRequired => Some("🔐"),
            NotificationType::ServerStatus => Some("🖥️"),
            NotificationType::SessionEvent => Some("💻"),
            NotificationType::Info => Some("ℹ️"),
        };

        if let Some(icon_str) = icon {
            builder = builder.icon(icon_str);
        }

        builder.show()
            .map_err(|e| format!("Failed to show notification: {}", e))?;

        Ok(())
    }
|
||||
|
||||
/// Mark notification as read
|
||||
pub async fn mark_as_read(&self, notification_id: &str) -> Result<(), String> {
|
||||
let mut notifications = self.notifications.write().await;
|
||||
if let Some(notification) = notifications.get_mut(notification_id) {
|
||||
notification.read = true;
|
||||
|
||||
// Update history
|
||||
let mut history = self.notification_history.write().await;
|
||||
if let Some(hist_notification) = history.iter_mut().find(|n| n.id == notification_id) {
|
||||
hist_notification.read = true;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
} else {
|
||||
Err("Notification not found".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// Mark all notifications as read
|
||||
pub async fn mark_all_as_read(&self) -> Result<(), String> {
|
||||
let mut notifications = self.notifications.write().await;
|
||||
for notification in notifications.values_mut() {
|
||||
notification.read = true;
|
||||
}
|
||||
|
||||
let mut history = self.notification_history.write().await;
|
||||
for notification in history.iter_mut() {
|
||||
notification.read = true;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get all notifications
|
||||
pub async fn get_notifications(&self) -> Vec<Notification> {
|
||||
self.notifications.read().await.values().cloned().collect()
|
||||
}
|
||||
|
||||
/// Get unread notification count
|
||||
pub async fn get_unread_count(&self) -> usize {
|
||||
self.notifications.read().await
|
||||
.values()
|
||||
.filter(|n| !n.read)
|
||||
.count()
|
||||
}
|
||||
|
||||
/// Get notification history
|
||||
pub async fn get_history(&self, limit: Option<usize>) -> Vec<Notification> {
|
||||
let history = self.notification_history.read().await;
|
||||
match limit {
|
||||
Some(l) => history.iter().rev().take(l).cloned().collect(),
|
||||
None => history.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Clear notification
|
||||
pub async fn clear_notification(&self, notification_id: &str) -> Result<(), String> {
|
||||
self.notifications.write().await.remove(notification_id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Clear all notifications
|
||||
pub async fn clear_all_notifications(&self) -> Result<(), String> {
|
||||
self.notifications.write().await.clear();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Show server status notification
|
||||
pub async fn notify_server_status(&self, running: bool, port: u16) -> Result<String, String> {
|
||||
let (title, body) = if running {
|
||||
(
|
||||
"Server Started".to_string(),
|
||||
format!("VibeTunnel server is now running on port {}", port),
|
||||
)
|
||||
} else {
|
||||
(
|
||||
"Server Stopped".to_string(),
|
||||
"VibeTunnel server has been stopped".to_string(),
|
||||
)
|
||||
};
|
||||
|
||||
self.show_notification(
|
||||
NotificationType::ServerStatus,
|
||||
NotificationPriority::Normal,
|
||||
title,
|
||||
body,
|
||||
vec![],
|
||||
HashMap::new(),
|
||||
).await
|
||||
}
|
||||
|
||||
/// Show update available notification
|
||||
pub async fn notify_update_available(&self, version: &str, download_url: &str) -> Result<String, String> {
|
||||
let mut metadata = HashMap::new();
|
||||
metadata.insert("version".to_string(), serde_json::Value::String(version.to_string()));
|
||||
metadata.insert("download_url".to_string(), serde_json::Value::String(download_url.to_string()));
|
||||
|
||||
self.show_notification(
|
||||
NotificationType::UpdateAvailable,
|
||||
NotificationPriority::High,
|
||||
"Update Available".to_string(),
|
||||
format!("VibeTunnel {} is now available. Click to download.", version),
|
||||
vec![
|
||||
NotificationAction {
|
||||
id: "download".to_string(),
|
||||
label: "Download".to_string(),
|
||||
action_type: "open_url".to_string(),
|
||||
}
|
||||
],
|
||||
metadata,
|
||||
).await
|
||||
}
|
||||
|
||||
/// Show permission required notification
|
||||
pub async fn notify_permission_required(&self, permission: &str, reason: &str) -> Result<String, String> {
|
||||
let mut metadata = HashMap::new();
|
||||
metadata.insert("permission".to_string(), serde_json::Value::String(permission.to_string()));
|
||||
|
||||
self.show_notification(
|
||||
NotificationType::PermissionRequired,
|
||||
NotificationPriority::High,
|
||||
"Permission Required".to_string(),
|
||||
format!("{} permission is required: {}", permission, reason),
|
||||
vec![
|
||||
NotificationAction {
|
||||
id: "grant".to_string(),
|
||||
label: "Grant Permission".to_string(),
|
||||
action_type: "request_permission".to_string(),
|
||||
}
|
||||
],
|
||||
metadata,
|
||||
).await
|
||||
}
|
||||
|
||||
/// Show error notification
|
||||
pub async fn notify_error(&self, title: &str, error_message: &str) -> Result<String, String> {
|
||||
self.show_notification(
|
||||
NotificationType::Error,
|
||||
NotificationPriority::High,
|
||||
title.to_string(),
|
||||
error_message.to_string(),
|
||||
vec![],
|
||||
HashMap::new(),
|
||||
).await
|
||||
}
|
||||
|
||||
/// Show success notification
|
||||
pub async fn notify_success(&self, title: &str, message: &str) -> Result<String, String> {
|
||||
self.show_notification(
|
||||
NotificationType::Success,
|
||||
NotificationPriority::Normal,
|
||||
title.to_string(),
|
||||
message.to_string(),
|
||||
vec![],
|
||||
HashMap::new(),
|
||||
).await
|
||||
}
|
||||
}
|
||||
529
tauri/src-tauri/src/permissions.rs
Normal file
529
tauri/src-tauri/src/permissions.rs
Normal file
|
|
@ -0,0 +1,529 @@
|
|||
use serde::{Serialize, Deserialize};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
use std::collections::HashMap;
|
||||
use chrono::{DateTime, Utc};
|
||||
use tauri::AppHandle;
|
||||
|
||||
/// Permission type enumeration
///
/// Identifies every OS capability the app may track; some variants only
/// apply on a single platform (see `PermissionInfo::platform_specific`).
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub enum PermissionType {
    ScreenRecording,      // macOS: capture terminal sessions with system UI
    Accessibility,        // macOS: advanced terminal integration
    NetworkAccess,        // all platforms: web server / remote access
    FileSystemFull,       // broad filesystem access (registered on Linux)
    FileSystemRestricted, // basic app-scoped filesystem access
    TerminalAccess,       // Windows: create/manage terminal sessions
    NotificationAccess,   // system notification delivery
    CameraAccess,         // declared but not yet wired to a check here
    MicrophoneAccess,     // declared but not yet wired to a check here
    AutoStart,            // Windows: launch at login
}
|
||||
|
||||
/// Permission status
///
/// Result of probing the OS for a permission's current state.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
pub enum PermissionStatus {
    Granted,       // the user/OS has allowed the capability
    Denied,        // explicitly refused
    NotDetermined, // never asked, or the probe was inconclusive
    Restricted,    // blocked by policy rather than user choice
    NotApplicable, // this permission does not exist on the current platform
}
|
||||
|
||||
/// Permission information
///
/// Cached record of one permission's state as last observed by the manager.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PermissionInfo {
    /// Which permission this record describes.
    pub permission_type: PermissionType,
    /// Last observed status; refreshed by the manager's check methods.
    pub status: PermissionStatus,
    /// Whether the app cannot function without this permission.
    pub required: bool,
    /// True when the permission only exists on some platforms.
    pub platform_specific: bool,
    /// Human-readable explanation of why the app wants this permission.
    pub description: String,
    /// When the status was last queried from the OS, if ever.
    pub last_checked: Option<DateTime<Utc>>,
    /// How many times a grant has been requested for this permission.
    pub request_count: u32,
}
|
||||
|
||||
/// Permission request result
///
/// Outcome returned to callers of `request_permission`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PermissionRequestResult {
    /// The permission that was requested.
    pub permission_type: PermissionType,
    /// Status after the request (may still be `NotDetermined` when the user
    /// must act in system settings).
    pub status: PermissionStatus,
    /// Optional human-readable guidance for the user.
    pub message: Option<String>,
    /// True when the grant only takes effect after an app restart.
    pub requires_restart: bool,
    /// True when the user must finish the grant in the OS settings UI.
    pub requires_system_settings: bool,
}
|
||||
|
||||
/// Platform-specific permission settings
///
/// Per-OS permission tables grouped into one serializable value.
/// NOTE(review): not referenced by any visible code in this file —
/// presumably intended for exporting per-platform defaults to the frontend;
/// confirm it has a caller before relying on it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlatformPermissions {
    /// Permission table used on macOS.
    pub macos: HashMap<PermissionType, PermissionInfo>,
    /// Permission table used on Windows.
    pub windows: HashMap<PermissionType, PermissionInfo>,
    /// Permission table used on Linux.
    pub linux: HashMap<PermissionType, PermissionInfo>,
}
|
||||
|
||||
/// Permissions manager
///
/// Tracks the status of every OS permission the app cares about, probes the
/// platform for current state, and drives grant requests.
pub struct PermissionsManager {
    /// Cached permission table, keyed by permission type.
    permissions: Arc<RwLock<HashMap<PermissionType, PermissionInfo>>>,
    /// Tauri app handle, set after startup via `set_app_handle`.
    app_handle: Arc<RwLock<Option<AppHandle>>>,
    /// Optional notification manager used to alert the user about missing
    /// permissions.
    notification_manager: Option<Arc<crate::notification_manager::NotificationManager>>,
}
|
||||
|
||||
impl PermissionsManager {
|
||||
/// Create a new permissions manager
|
||||
pub fn new() -> Self {
|
||||
let manager = Self {
|
||||
permissions: Arc::new(RwLock::new(HashMap::new())),
|
||||
app_handle: Arc::new(RwLock::new(None)),
|
||||
notification_manager: None,
|
||||
};
|
||||
|
||||
// Initialize default permissions
|
||||
tokio::spawn({
|
||||
let permissions = manager.permissions.clone();
|
||||
async move {
|
||||
let default_permissions = Self::initialize_permissions();
|
||||
*permissions.write().await = default_permissions;
|
||||
}
|
||||
});
|
||||
|
||||
manager
|
||||
}
|
||||
|
||||
/// Set the app handle
|
||||
pub async fn set_app_handle(&self, app_handle: AppHandle) {
|
||||
*self.app_handle.write().await = Some(app_handle);
|
||||
}
|
||||
|
||||
    /// Set the notification manager
    ///
    /// Wires in the shared notification manager used to surface
    /// permission-related alerts to the user. Takes `&mut self`, so it must
    /// be called before the manager is shared behind an `Arc`.
    pub fn set_notification_manager(&mut self, notification_manager: Arc<crate::notification_manager::NotificationManager>) {
        self.notification_manager = Some(notification_manager);
    }
|
||||
|
||||
/// Initialize default permissions based on platform
|
||||
fn initialize_permissions() -> HashMap<PermissionType, PermissionInfo> {
|
||||
let mut permissions = HashMap::new();
|
||||
|
||||
// Get current platform
|
||||
let platform = std::env::consts::OS;
|
||||
|
||||
match platform {
|
||||
"macos" => {
|
||||
permissions.insert(PermissionType::ScreenRecording, PermissionInfo {
|
||||
permission_type: PermissionType::ScreenRecording,
|
||||
status: PermissionStatus::NotDetermined,
|
||||
required: false,
|
||||
platform_specific: true,
|
||||
description: "Required for recording terminal sessions with system UI".to_string(),
|
||||
last_checked: None,
|
||||
request_count: 0,
|
||||
});
|
||||
|
||||
permissions.insert(PermissionType::Accessibility, PermissionInfo {
|
||||
permission_type: PermissionType::Accessibility,
|
||||
status: PermissionStatus::NotDetermined,
|
||||
required: false,
|
||||
platform_specific: true,
|
||||
description: "Required for advanced terminal integration features".to_string(),
|
||||
last_checked: None,
|
||||
request_count: 0,
|
||||
});
|
||||
|
||||
permissions.insert(PermissionType::NotificationAccess, PermissionInfo {
|
||||
permission_type: PermissionType::NotificationAccess,
|
||||
status: PermissionStatus::NotDetermined,
|
||||
required: false,
|
||||
platform_specific: true,
|
||||
description: "Required to show system notifications".to_string(),
|
||||
last_checked: None,
|
||||
request_count: 0,
|
||||
});
|
||||
}
|
||||
"windows" => {
|
||||
permissions.insert(PermissionType::TerminalAccess, PermissionInfo {
|
||||
permission_type: PermissionType::TerminalAccess,
|
||||
status: PermissionStatus::NotDetermined,
|
||||
required: true,
|
||||
platform_specific: true,
|
||||
description: "Required to create and manage terminal sessions".to_string(),
|
||||
last_checked: None,
|
||||
request_count: 0,
|
||||
});
|
||||
|
||||
permissions.insert(PermissionType::AutoStart, PermissionInfo {
|
||||
permission_type: PermissionType::AutoStart,
|
||||
status: PermissionStatus::NotDetermined,
|
||||
required: false,
|
||||
platform_specific: true,
|
||||
description: "Required to start VibeTunnel with Windows".to_string(),
|
||||
last_checked: None,
|
||||
request_count: 0,
|
||||
});
|
||||
}
|
||||
"linux" => {
|
||||
permissions.insert(PermissionType::FileSystemFull, PermissionInfo {
|
||||
permission_type: PermissionType::FileSystemFull,
|
||||
status: PermissionStatus::Granted,
|
||||
required: true,
|
||||
platform_specific: false,
|
||||
description: "Required for saving recordings and configurations".to_string(),
|
||||
last_checked: None,
|
||||
request_count: 0,
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// Add common permissions
|
||||
permissions.insert(PermissionType::NetworkAccess, PermissionInfo {
|
||||
permission_type: PermissionType::NetworkAccess,
|
||||
status: PermissionStatus::Granted,
|
||||
required: true,
|
||||
platform_specific: false,
|
||||
description: "Required for web server and remote access features".to_string(),
|
||||
last_checked: None,
|
||||
request_count: 0,
|
||||
});
|
||||
|
||||
permissions.insert(PermissionType::FileSystemRestricted, PermissionInfo {
|
||||
permission_type: PermissionType::FileSystemRestricted,
|
||||
status: PermissionStatus::Granted,
|
||||
required: true,
|
||||
platform_specific: false,
|
||||
description: "Required for basic application functionality".to_string(),
|
||||
last_checked: None,
|
||||
request_count: 0,
|
||||
});
|
||||
|
||||
permissions
|
||||
}
|
||||
|
||||
    /// Check all permissions
    ///
    /// Re-queries the OS for every known permission, updates each entry's
    /// cached status and `last_checked` timestamp, and returns a snapshot of
    /// the refreshed table.
    ///
    /// NOTE(review): the write lock is held across the per-permission
    /// checks, which shell out to external commands on some platforms —
    /// slow probes will block every other reader/writer for the duration.
    pub async fn check_all_permissions(&self) -> Vec<PermissionInfo> {
        let mut permissions = self.permissions.write().await;

        for (permission_type, info) in permissions.iter_mut() {
            info.status = self.check_permission_internal(*permission_type).await;
            info.last_checked = Some(Utc::now());
        }

        permissions.values().cloned().collect()
    }
|
||||
|
||||
/// Check specific permission
|
||||
pub async fn check_permission(&self, permission_type: PermissionType) -> PermissionStatus {
|
||||
let status = self.check_permission_internal(permission_type).await;
|
||||
|
||||
// Update stored status
|
||||
if let Some(info) = self.permissions.write().await.get_mut(&permission_type) {
|
||||
info.status = status;
|
||||
info.last_checked = Some(Utc::now());
|
||||
}
|
||||
|
||||
status
|
||||
}
|
||||
|
||||
    /// Internal permission checking logic
    ///
    /// Dispatches to the platform-specific probe for the (platform,
    /// permission) pairs that have one; everything else reports
    /// `NotApplicable`. The runtime `platform` string and the compile-time
    /// `#[cfg]` guards are deliberately redundant: the cfg keeps foreign
    /// platform code out of the build, the string keeps the match honest at
    /// runtime.
    async fn check_permission_internal(&self, permission_type: PermissionType) -> PermissionStatus {
        let platform = std::env::consts::OS;

        match (platform, permission_type) {
            #[cfg(target_os = "macos")]
            ("macos", PermissionType::ScreenRecording) => {
                self.check_screen_recording_permission_macos().await
            }
            #[cfg(target_os = "macos")]
            ("macos", PermissionType::Accessibility) => {
                self.check_accessibility_permission_macos().await
            }
            #[cfg(target_os = "macos")]
            ("macos", PermissionType::NotificationAccess) => {
                self.check_notification_permission_macos().await
            }
            #[cfg(target_os = "windows")]
            ("windows", PermissionType::TerminalAccess) => {
                self.check_terminal_permission_windows().await
            }
            #[cfg(target_os = "windows")]
            ("windows", PermissionType::AutoStart) => {
                self.check_auto_start_permission_windows().await
            }
            // Anything without a platform probe (including every permission
            // on Linux) is treated as not applicable here.
            _ => PermissionStatus::NotApplicable,
        }
    }
|
||||
|
||||
    /// Request permission
    ///
    /// Bumps the permission's request counter, then routes to the
    /// platform-specific request flow where one exists (currently only the
    /// three macOS permissions). All other combinations resolve immediately
    /// to `NotApplicable`.
    pub async fn request_permission(&self, permission_type: PermissionType) -> Result<PermissionRequestResult, String> {
        // Update request count
        if let Some(info) = self.permissions.write().await.get_mut(&permission_type) {
            info.request_count += 1;
        }

        let platform = std::env::consts::OS;

        match (platform, permission_type) {
            #[cfg(target_os = "macos")]
            ("macos", PermissionType::ScreenRecording) => {
                self.request_screen_recording_permission_macos().await
            }
            #[cfg(target_os = "macos")]
            ("macos", PermissionType::Accessibility) => {
                self.request_accessibility_permission_macos().await
            }
            #[cfg(target_os = "macos")]
            ("macos", PermissionType::NotificationAccess) => {
                self.request_notification_permission_macos().await
            }
            // No interactive request flow exists for this combination.
            _ => Ok(PermissionRequestResult {
                permission_type,
                status: PermissionStatus::NotApplicable,
                message: Some("Permission not applicable on this platform".to_string()),
                requires_restart: false,
                requires_system_settings: false,
            }),
        }
    }
|
||||
|
||||
/// Get permission info
|
||||
pub async fn get_permission_info(&self, permission_type: PermissionType) -> Option<PermissionInfo> {
|
||||
self.permissions.read().await.get(&permission_type).cloned()
|
||||
}
|
||||
|
||||
/// Get all permissions
|
||||
pub async fn get_all_permissions(&self) -> Vec<PermissionInfo> {
|
||||
self.permissions.read().await.values().cloned().collect()
|
||||
}
|
||||
|
||||
/// Get required permissions
|
||||
pub async fn get_required_permissions(&self) -> Vec<PermissionInfo> {
|
||||
self.permissions.read().await
|
||||
.values()
|
||||
.filter(|info| info.required)
|
||||
.cloned()
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get missing required permissions
|
||||
pub async fn get_missing_required_permissions(&self) -> Vec<PermissionInfo> {
|
||||
self.permissions.read().await
|
||||
.values()
|
||||
.filter(|info| info.required && info.status != PermissionStatus::Granted)
|
||||
.cloned()
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Check if all required permissions are granted
|
||||
pub async fn all_required_permissions_granted(&self) -> bool {
|
||||
!self.permissions.read().await
|
||||
.values()
|
||||
.any(|info| info.required && info.status != PermissionStatus::Granted)
|
||||
}
|
||||
|
||||
    /// Open system settings for permission
    ///
    /// Deep-links into the OS settings pane where the user can grant the
    /// given permission. Only combinations with a known settings URL are
    /// supported; everything else returns an error.
    pub async fn open_system_settings(&self, permission_type: PermissionType) -> Result<(), String> {
        let platform = std::env::consts::OS;

        match (platform, permission_type) {
            #[cfg(target_os = "macos")]
            ("macos", PermissionType::ScreenRecording) => {
                self.open_screen_recording_settings_macos().await
            }
            #[cfg(target_os = "macos")]
            ("macos", PermissionType::Accessibility) => {
                self.open_accessibility_settings_macos().await
            }
            #[cfg(target_os = "macos")]
            ("macos", PermissionType::NotificationAccess) => {
                self.open_notification_settings_macos().await
            }
            #[cfg(target_os = "windows")]
            ("windows", PermissionType::AutoStart) => {
                self.open_startup_settings_windows().await
            }
            _ => Err("No system settings available for this permission".to_string()),
        }
    }
|
||||
|
||||
// Platform-specific implementations
|
||||
    #[cfg(target_os = "macos")]
    async fn check_screen_recording_permission_macos(&self) -> PermissionStatus {
        // NOTE(review): this probe runs an AppleScript against System
        // Events, which exercises the Automation/Apple Events permission —
        // it does not directly query the Screen Recording (ScreenCapture)
        // TCC entry, so the result is at best a proxy. Confirm whether a
        // CGPreflightScreenCaptureAccess-based check should replace it.
        use std::process::Command;

        let output = Command::new("osascript")
            .arg("-e")
            .arg("tell application \"System Events\" to get properties")
            .output();

        // A successful script run is treated as granted; any failure
        // (including osascript not being runnable) as undetermined.
        match output {
            Ok(output) if output.status.success() => PermissionStatus::Granted,
            _ => PermissionStatus::NotDetermined,
        }
    }
|
||||
|
||||
    /// Request screen recording permission on macOS.
    ///
    /// macOS offers no programmatic grant: this notifies the user (best
    /// effort), opens the Screen Recording settings pane, and reports
    /// `NotDetermined` with `requires_restart`/`requires_system_settings`
    /// set, since the grant is completed manually and only takes effect
    /// after relaunch.
    #[cfg(target_os = "macos")]
    async fn request_screen_recording_permission_macos(&self) -> Result<PermissionRequestResult, String> {
        // Show notification about needing to grant permission (ignore failures).
        if let Some(notification_manager) = &self.notification_manager {
            let _ = notification_manager.notify_permission_required(
                "Screen Recording",
                "VibeTunnel needs screen recording permission to capture terminal sessions"
            ).await;
        }

        // Open system preferences; failure here doesn't change the outcome.
        let _ = self.open_screen_recording_settings_macos().await;

        Ok(PermissionRequestResult {
            permission_type: PermissionType::ScreenRecording,
            status: PermissionStatus::NotDetermined,
            message: Some("Please grant screen recording permission in System Settings".to_string()),
            requires_restart: true,
            requires_system_settings: true,
        })
    }
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
async fn open_screen_recording_settings_macos(&self) -> Result<(), String> {
|
||||
use std::process::Command;
|
||||
|
||||
Command::new("open")
|
||||
.arg("x-apple.systempreferences:com.apple.preference.security?Privacy_ScreenCapture")
|
||||
.spawn()
|
||||
.map_err(|e| format!("Failed to open system preferences: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
    /// Check accessibility permission on macOS by asking System Events
    /// whether UI scripting ("UI elements enabled") is on.
    #[cfg(target_os = "macos")]
    async fn check_accessibility_permission_macos(&self) -> PermissionStatus {
        use std::process::Command;

        let output = Command::new("osascript")
            .arg("-e")
            .arg("tell application \"System Events\" to get UI elements enabled")
            .output();

        // osascript prints "true"/"false"; a failed run (e.g. Apple Events
        // blocked) is reported as undetermined rather than denied.
        match output {
            Ok(output) if output.status.success() => {
                let result = String::from_utf8_lossy(&output.stdout);
                if result.trim() == "true" {
                    PermissionStatus::Granted
                } else {
                    PermissionStatus::Denied
                }
            }
            _ => PermissionStatus::NotDetermined,
        }
    }
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
async fn request_accessibility_permission_macos(&self) -> Result<PermissionRequestResult, String> {
|
||||
let _ = self.open_accessibility_settings_macos().await;
|
||||
|
||||
Ok(PermissionRequestResult {
|
||||
permission_type: PermissionType::Accessibility,
|
||||
status: PermissionStatus::NotDetermined,
|
||||
message: Some("Please grant accessibility permission in System Settings".to_string()),
|
||||
requires_restart: false,
|
||||
requires_system_settings: true,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
async fn open_accessibility_settings_macos(&self) -> Result<(), String> {
|
||||
use std::process::Command;
|
||||
|
||||
Command::new("open")
|
||||
.arg("x-apple.systempreferences:com.apple.preference.security?Privacy_Accessibility")
|
||||
.spawn()
|
||||
.map_err(|e| format!("Failed to open system preferences: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
    #[cfg(target_os = "macos")]
    async fn check_notification_permission_macos(&self) -> PermissionStatus {
        // For now, assume granted as Tauri handles this
        // NOTE(review): no real probe is performed — the Tauri notification
        // plugin drives the actual system prompt, so this optimistically
        // reports Granted. Replace with a real check if the plugin exposes
        // one.
        PermissionStatus::Granted
    }
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
async fn request_notification_permission_macos(&self) -> Result<PermissionRequestResult, String> {
|
||||
Ok(PermissionRequestResult {
|
||||
permission_type: PermissionType::NotificationAccess,
|
||||
status: PermissionStatus::Granted,
|
||||
message: Some("Notification permission is handled by the system".to_string()),
|
||||
requires_restart: false,
|
||||
requires_system_settings: false,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
async fn open_notification_settings_macos(&self) -> Result<(), String> {
|
||||
use std::process::Command;
|
||||
|
||||
Command::new("open")
|
||||
.arg("x-apple.systempreferences:com.apple.preference.notifications")
|
||||
.spawn()
|
||||
.map_err(|e| format!("Failed to open system preferences: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
    #[cfg(target_os = "windows")]
    async fn check_terminal_permission_windows(&self) -> PermissionStatus {
        // On Windows, terminal access is generally granted
        // NOTE(review): no probe is performed; this hard-codes Granted on
        // the assumption that spawning consoles needs no special privilege.
        PermissionStatus::Granted
    }
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
async fn check_auto_start_permission_windows(&self) -> PermissionStatus {
|
||||
// Check if auto-start is configured
|
||||
use crate::auto_launch;
|
||||
|
||||
match auto_launch::get_auto_launch().await {
|
||||
Ok(enabled) => {
|
||||
if enabled {
|
||||
PermissionStatus::Granted
|
||||
} else {
|
||||
PermissionStatus::Denied
|
||||
}
|
||||
}
|
||||
Err(_) => PermissionStatus::NotDetermined,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
async fn open_startup_settings_windows(&self) -> Result<(), String> {
|
||||
use std::process::Command;
|
||||
|
||||
Command::new("cmd")
|
||||
.args(&["/c", "start", "ms-settings:startupapps"])
|
||||
.spawn()
|
||||
.map_err(|e| format!("Failed to open startup settings: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Show permission required notification
|
||||
pub async fn notify_permission_required(&self, permission_info: &PermissionInfo) -> Result<(), String> {
|
||||
if let Some(notification_manager) = &self.notification_manager {
|
||||
notification_manager.notify_permission_required(
|
||||
&format!("{:?}", permission_info.permission_type),
|
||||
&permission_info.description
|
||||
).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Permission statistics
///
/// Aggregate counts over the permission table, suitable for a dashboard.
/// NOTE(review): no constructor for this type is visible in this file —
/// confirm where it is populated.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PermissionStats {
    /// Number of tracked permissions.
    pub total_permissions: usize,
    /// How many are currently granted.
    pub granted_permissions: usize,
    /// How many are currently denied.
    pub denied_permissions: usize,
    /// How many are flagged required.
    pub required_permissions: usize,
    /// Required permissions that are not granted.
    pub missing_required: usize,
    /// OS name the stats were computed on.
    pub platform: String,
}
|
||||
470
tauri/src-tauri/src/port_conflict.rs
Normal file
470
tauri/src-tauri/src/port_conflict.rs
Normal file
|
|
@ -0,0 +1,470 @@
|
|||
use std::process::Command;
|
||||
use std::net::TcpListener;
|
||||
use serde::{Serialize, Deserialize};
|
||||
use tracing::{info, error};
|
||||
|
||||
/// Information about a process using a port
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProcessDetails {
    /// OS process id.
    pub pid: u32,
    /// Command/executable name as reported by the platform tool.
    pub name: String,
    /// Executable path when it could be resolved (best effort).
    pub path: Option<String>,
    /// Parent process id, when known.
    pub parent_pid: Option<u32>,
}
|
||||
|
||||
impl ProcessDetails {
|
||||
/// Check if this is a VibeTunnel process
|
||||
pub fn is_vibetunnel(&self) -> bool {
|
||||
if let Some(path) = &self.path {
|
||||
return path.contains("vibetunnel") || path.contains("VibeTunnel");
|
||||
}
|
||||
self.name.contains("vibetunnel") || self.name.contains("VibeTunnel")
|
||||
}
|
||||
|
||||
/// Check if this is one of our managed servers
|
||||
pub fn is_managed_server(&self) -> bool {
|
||||
self.name == "vibetunnel" ||
|
||||
self.name.contains("node") && self.path.as_ref().map(|p| p.contains("VibeTunnel")).unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
/// Information about a port conflict
///
/// Everything the UI needs to explain who holds a port and what to do next.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PortConflict {
    /// The contested TCP port.
    pub port: u16,
    /// The process found listening on the port.
    pub process: ProcessDetails,
    /// Its root/ancestor process, when it could be resolved.
    pub root_process: Option<ProcessDetails>,
    /// Recommended resolution strategy.
    pub suggested_action: ConflictAction,
    /// Nearby free ports offered as alternatives.
    pub alternative_ports: Vec<u16>,
}
|
||||
|
||||
/// Suggested action for resolving a port conflict
///
/// Serialized with a snake_case `type` tag for the frontend.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ConflictAction {
    /// The conflicting process is one of ours — offer to terminate it.
    KillOurInstance { pid: u32, process_name: String },
    /// Recommend switching to one of the alternative ports.
    SuggestAlternativePort,
    /// An unrelated application owns the port — surface its name.
    ReportExternalApp { name: String },
}
|
||||
|
||||
/// Port conflict resolver
///
/// Stateless namespace for detecting which process occupies a TCP port and
/// suggesting how to resolve the conflict.
pub struct PortConflictResolver;
|
||||
|
||||
impl PortConflictResolver {
|
||||
/// Check if a port is available
|
||||
pub async fn is_port_available(port: u16) -> bool {
|
||||
TcpListener::bind(format!("127.0.0.1:{}", port)).is_ok()
|
||||
}
|
||||
|
||||
/// Detect what process is using a port
|
||||
pub async fn detect_conflict(port: u16) -> Option<PortConflict> {
|
||||
// First check if port is actually in use
|
||||
if Self::is_port_available(port).await {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Platform-specific conflict detection
|
||||
#[cfg(target_os = "macos")]
|
||||
return Self::detect_conflict_macos(port).await;
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return Self::detect_conflict_linux(port).await;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return Self::detect_conflict_windows(port).await;
|
||||
}
|
||||
|
||||
    /// macOS probe: ask lsof who is listening on `port` and assemble the
    /// full `PortConflict` (owner, root process, alternative ports, action).
    #[cfg(target_os = "macos")]
    async fn detect_conflict_macos(port: u16) -> Option<PortConflict> {
        // `-F` requests machine-readable field output (p/c/R tags), parsed
        // by `parse_lsof_output`. Absolute path avoids PATH surprises.
        let output = Command::new("/usr/sbin/lsof")
            .args(&["-i", &format!(":{}", port), "-n", "-P", "-F"])
            .output()
            .ok()?;

        if !output.status.success() {
            return None;
        }

        let stdout = String::from_utf8_lossy(&output.stdout);
        let process_info = Self::parse_lsof_output(&stdout)?;

        // Get root process
        let root_process = Self::find_root_process(&process_info).await;

        // Find alternative ports
        let alternatives = Self::find_available_ports(port, 3).await;

        // Determine action
        let action = Self::determine_action(&process_info, &root_process);

        Some(PortConflict {
            port,
            process: process_info,
            root_process,
            suggested_action: action,
            alternative_ports: alternatives,
        })
    }
|
||||
|
||||
    /// Linux probe: try lsof field output first, then fall back to parsing
    /// `netstat -tulpn` for a `pid/name` column on the matching LISTEN line.
    #[cfg(target_os = "linux")]
    async fn detect_conflict_linux(port: u16) -> Option<PortConflict> {
        // Try lsof first (machine-readable -F output).
        if let Ok(output) = Command::new("lsof")
            .args(&["-i", &format!(":{}", port), "-n", "-P", "-F"])
            .output()
        {
            if output.status.success() {
                let stdout = String::from_utf8_lossy(&output.stdout);
                if let Some(process_info) = Self::parse_lsof_output(&stdout) {
                    let root_process = Self::find_root_process(&process_info).await;
                    let alternatives = Self::find_available_ports(port, 3).await;
                    let action = Self::determine_action(&process_info, &root_process);

                    return Some(PortConflict {
                        port,
                        process: process_info,
                        root_process,
                        suggested_action: action,
                        alternative_ports: alternatives,
                    });
                }
            }
        }

        // Fallback to netstat
        if let Ok(output) = Command::new("netstat")
            .args(&["-tulpn"])
            .output()
        {
            let stdout = String::from_utf8_lossy(&output.stdout);
            // Parse netstat output (simplified). NOTE(review): `:{port}` is
            // matched as a substring, so port 80 also matches :8080; the
            // last column is assumed to be "pid/name", which netstat prints
            // as "-" without privileges — the parse then fails and the line
            // is skipped. Confirm whether ss(8) should replace this.
            for line in stdout.lines() {
                if line.contains(&format!(":{}", port)) && line.contains("LISTEN") {
                    // Extract PID from line (format: "tcp ... LISTEN 1234/process")
                    if let Some(pid_part) = line.split_whitespace().last() {
                        if let Some(pid_str) = pid_part.split('/').next() {
                            if let Ok(pid) = pid_str.parse::<u32>() {
                                let name = pid_part.split('/').nth(1).unwrap_or("unknown").to_string();
                                let process_info = ProcessDetails {
                                    pid,
                                    name,
                                    path: None,
                                    parent_pid: None,
                                };

                                let alternatives = Self::find_available_ports(port, 3).await;
                                let action = Self::determine_action(&process_info, &None);

                                return Some(PortConflict {
                                    port,
                                    process: process_info,
                                    root_process: None,
                                    suggested_action: action,
                                    alternative_ports: alternatives,
                                });
                            }
                        }
                    }
                }
            }
        }

        None
    }
|
||||
|
||||
    /// Windows probe: scan `netstat -ano -p tcp` for the LISTENING line on
    /// `port`, then resolve the PID to a process name via `tasklist`.
    #[cfg(target_os = "windows")]
    async fn detect_conflict_windows(port: u16) -> Option<PortConflict> {
        // Use netstat to find process using the port
        let output = Command::new("netstat")
            .args(&["-ano", "-p", "tcp"])
            .output()
            .ok()?;

        let stdout = String::from_utf8_lossy(&output.stdout);

        // Parse netstat output to find the PID.
        // NOTE(review): `:{port}` is a substring match, so port 80 also
        // matches :8080 — consider anchoring on the local-address column.
        for line in stdout.lines() {
            if line.contains(&format!(":{}", port)) && line.contains("LISTENING") {
                // Extract PID from the last column
                if let Some(pid_str) = line.split_whitespace().last() {
                    if let Ok(pid) = pid_str.parse::<u32>() {
                        // Get process name using tasklist (CSV, no header).
                        // NOTE(review): when no task matches, tasklist emits
                        // an "INFO: ..." line whose first CSV field would be
                        // taken as the name — confirm this edge case.
                        if let Ok(tasklist_output) = Command::new("tasklist")
                            .args(&["/FI", &format!("PID eq {}", pid), "/FO", "CSV", "/NH"])
                            .output()
                        {
                            let tasklist_stdout = String::from_utf8_lossy(&tasklist_output.stdout);
                            if let Some(line) = tasklist_stdout.lines().next() {
                                let parts: Vec<&str> = line.split(',').collect();
                                if parts.len() > 0 {
                                    let name = parts[0].trim_matches('"').to_string();
                                    let process_info = ProcessDetails {
                                        pid,
                                        name,
                                        path: None,
                                        parent_pid: None,
                                    };

                                    let alternatives = Self::find_available_ports(port, 3).await;
                                    let action = Self::determine_action(&process_info, &None);

                                    return Some(PortConflict {
                                        port,
                                        process: process_info,
                                        root_process: None,
                                        suggested_action: action,
                                        alternative_ports: alternatives,
                                    });
                                }
                            }
                        }
                    }
                }
            }
        }

        None
    }
|
||||
|
||||
/// Parse lsof output
|
||||
fn parse_lsof_output(output: &str) -> Option<ProcessDetails> {
|
||||
let mut pid: Option<u32> = None;
|
||||
let mut name: Option<String> = None;
|
||||
let mut ppid: Option<u32> = None;
|
||||
|
||||
// Parse lsof field output format
|
||||
for line in output.lines() {
|
||||
if line.starts_with('p') {
|
||||
pid = line[1..].parse().ok();
|
||||
} else if line.starts_with('c') {
|
||||
name = Some(line[1..].to_string());
|
||||
} else if line.starts_with('R') {
|
||||
ppid = line[1..].parse().ok();
|
||||
}
|
||||
}
|
||||
|
||||
if let (Some(pid), Some(name)) = (pid, name) {
|
||||
// Get additional process info
|
||||
let path = Self::get_process_path(pid);
|
||||
|
||||
Some(ProcessDetails {
|
||||
pid,
|
||||
name,
|
||||
path,
|
||||
parent_pid: ppid,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Best-effort lookup of the executable name for `pid`.
///
/// Uses `ps -o comm=` on Unix. Returns `None` on other platforms, when
/// `ps` fails, or when it prints nothing for the PID.
fn get_process_path(pid: u32) -> Option<String> {
    #[cfg(unix)]
    {
        let result = Command::new("ps")
            .args(&["-p", &pid.to_string(), "-o", "comm="])
            .output();

        if let Ok(output) = result {
            let path = String::from_utf8_lossy(&output.stdout).trim().to_string();
            // An empty result means ps found no such process.
            if !path.is_empty() {
                return Some(path);
            }
        }
    }

    None
}
|
||||
|
||||
/// Find root process
|
||||
async fn find_root_process(process: &ProcessDetails) -> Option<ProcessDetails> {
|
||||
let mut current = process.clone();
|
||||
let mut visited = std::collections::HashSet::new();
|
||||
|
||||
while let Some(parent_pid) = current.parent_pid {
|
||||
if parent_pid <= 1 || visited.contains(&parent_pid) {
|
||||
break;
|
||||
}
|
||||
visited.insert(current.pid);
|
||||
|
||||
// Get parent process info
|
||||
if let Some(parent_info) = Self::get_process_info(parent_pid).await {
|
||||
// If parent is VibeTunnel, it's our root
|
||||
if parent_info.is_vibetunnel() {
|
||||
return Some(parent_info);
|
||||
}
|
||||
current = parent_info;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Get process info by PID
|
||||
async fn get_process_info(pid: u32) -> Option<ProcessDetails> {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
if let Ok(output) = Command::new("ps")
|
||||
.args(&["-p", &pid.to_string(), "-o", "pid=,ppid=,comm="])
|
||||
.output()
|
||||
{
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
let parts: Vec<&str> = stdout.trim().split_whitespace().collect();
|
||||
|
||||
if parts.len() >= 3 {
|
||||
let pid = parts[0].parse().ok()?;
|
||||
let ppid = parts[1].parse().ok();
|
||||
let name = parts[2..].join(" ");
|
||||
let path = Self::get_process_path(pid);
|
||||
|
||||
return Some(ProcessDetails {
|
||||
pid,
|
||||
name,
|
||||
path,
|
||||
parent_pid: ppid,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
// Windows implementation would use WMI or similar
|
||||
// For now, return None
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Find available ports near a given port
|
||||
async fn find_available_ports(near_port: u16, count: usize) -> Vec<u16> {
|
||||
let mut available_ports = Vec::new();
|
||||
let start = near_port.saturating_sub(10).max(1024);
|
||||
let end = near_port.saturating_add(100).min(65535);
|
||||
|
||||
for port in start..=end {
|
||||
if port != near_port && Self::is_port_available(port).await {
|
||||
available_ports.push(port);
|
||||
if available_ports.len() >= count {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
available_ports
|
||||
}
|
||||
|
||||
/// Determine action for conflict resolution
|
||||
fn determine_action(process: &ProcessDetails, root_process: &Option<ProcessDetails>) -> ConflictAction {
|
||||
// If it's our managed server, kill it
|
||||
if process.is_managed_server() {
|
||||
return ConflictAction::KillOurInstance {
|
||||
pid: process.pid,
|
||||
process_name: process.name.clone(),
|
||||
};
|
||||
}
|
||||
|
||||
// If root process is VibeTunnel, kill the whole app
|
||||
if let Some(root) = root_process {
|
||||
if root.is_vibetunnel() {
|
||||
return ConflictAction::KillOurInstance {
|
||||
pid: root.pid,
|
||||
process_name: root.name.clone(),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// If the process itself is VibeTunnel
|
||||
if process.is_vibetunnel() {
|
||||
return ConflictAction::KillOurInstance {
|
||||
pid: process.pid,
|
||||
process_name: process.name.clone(),
|
||||
};
|
||||
}
|
||||
|
||||
// Otherwise, it's an external app
|
||||
ConflictAction::ReportExternalApp {
|
||||
name: process.name.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve a port conflict
|
||||
pub async fn resolve_conflict(conflict: &PortConflict) -> Result<(), String> {
|
||||
match &conflict.suggested_action {
|
||||
ConflictAction::KillOurInstance { pid, process_name } => {
|
||||
info!("Killing conflicting process: {} (PID: {})", process_name, pid);
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
let output = Command::new("kill")
|
||||
.args(&["-9", &pid.to_string()])
|
||||
.output()
|
||||
.map_err(|e| format!("Failed to execute kill command: {}", e))?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(format!("Failed to kill process {}", pid));
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
let output = Command::new("taskkill")
|
||||
.args(&["/F", "/PID", &pid.to_string()])
|
||||
.output()
|
||||
.map_err(|e| format!("Failed to execute taskkill command: {}", e))?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(format!("Failed to kill process {}", pid));
|
||||
}
|
||||
}
|
||||
|
||||
// Wait for port to be released
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
|
||||
Ok(())
|
||||
}
|
||||
ConflictAction::SuggestAlternativePort | ConflictAction::ReportExternalApp { .. } => {
|
||||
// These require user action
|
||||
Err("This conflict requires user action to resolve".to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Force kill a process
|
||||
pub async fn force_kill_process(conflict: &PortConflict) -> Result<(), String> {
|
||||
info!("Force killing process: {} (PID: {})", conflict.process.name, conflict.process.pid);
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
let output = Command::new("kill")
|
||||
.args(&["-9", &conflict.process.pid.to_string()])
|
||||
.output()
|
||||
.map_err(|e| format!("Failed to execute kill command: {}", e))?;
|
||||
|
||||
if !output.status.success() {
|
||||
error!("Failed to kill process with regular permissions");
|
||||
return Err(format!("Failed to kill process {}", conflict.process.pid));
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
let output = Command::new("taskkill")
|
||||
.args(&["/F", "/PID", &conflict.process.pid.to_string()])
|
||||
.output()
|
||||
.map_err(|e| format!("Failed to execute taskkill command: {}", e))?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(format!("Failed to kill process {}", conflict.process.pid));
|
||||
}
|
||||
}
|
||||
|
||||
// Wait for port to be released
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
@ -23,17 +23,20 @@ use std::fs;
|
|||
|
||||
use crate::terminal::TerminalManager;
|
||||
use crate::auth::{AuthConfig, auth_middleware, check_auth, login};
|
||||
use crate::session_monitor::SessionMonitor;
|
||||
|
||||
// Combined app state for Axum
|
||||
#[derive(Clone)]
|
||||
struct AppState {
|
||||
terminal_manager: Arc<TerminalManager>,
|
||||
auth_config: Arc<AuthConfig>,
|
||||
session_monitor: Arc<SessionMonitor>,
|
||||
}
|
||||
|
||||
pub struct HttpServer {
|
||||
terminal_manager: Arc<TerminalManager>,
|
||||
auth_config: Arc<AuthConfig>,
|
||||
session_monitor: Arc<SessionMonitor>,
|
||||
port: u16,
|
||||
shutdown_tx: Option<tokio::sync::oneshot::Sender<()>>,
|
||||
handle: Option<tokio::task::JoinHandle<()>>,
|
||||
|
|
@ -97,20 +100,22 @@ impl HttpServer {
|
|||
self.port
|
||||
}
|
||||
|
||||
pub fn new(terminal_manager: Arc<TerminalManager>) -> Self {
|
||||
pub fn new(terminal_manager: Arc<TerminalManager>, session_monitor: Arc<SessionMonitor>) -> Self {
|
||||
Self {
|
||||
terminal_manager,
|
||||
auth_config: Arc::new(AuthConfig::new(false, None)),
|
||||
session_monitor,
|
||||
port: 0,
|
||||
shutdown_tx: None,
|
||||
handle: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_auth(terminal_manager: Arc<TerminalManager>, auth_config: AuthConfig) -> Self {
|
||||
pub fn with_auth(terminal_manager: Arc<TerminalManager>, session_monitor: Arc<SessionMonitor>, auth_config: AuthConfig) -> Self {
|
||||
Self {
|
||||
terminal_manager,
|
||||
auth_config: Arc::new(auth_config),
|
||||
session_monitor,
|
||||
port: 0,
|
||||
shutdown_tx: None,
|
||||
handle: None,
|
||||
|
|
@ -203,6 +208,7 @@ impl HttpServer {
|
|||
let app_state = AppState {
|
||||
terminal_manager: self.terminal_manager.clone(),
|
||||
auth_config: self.auth_config.clone(),
|
||||
session_monitor: self.session_monitor.clone(),
|
||||
};
|
||||
|
||||
// Don't serve static files in Tauri - the frontend is served by Tauri itself
|
||||
|
|
@ -223,8 +229,16 @@ impl HttpServer {
|
|||
.route("/api/sessions/:id/input", post(send_input))
|
||||
.route("/api/sessions/:id/stream", get(terminal_stream))
|
||||
.route("/api/sessions/:id/snapshot", get(get_snapshot))
|
||||
.route("/api/sessions/events", get(session_events_stream))
|
||||
.route("/api/ws/:id", get(terminal_websocket))
|
||||
.route("/api/fs/browse", get(browse_directory))
|
||||
.route("/api/fs/info", get(crate::fs_api::get_file_info))
|
||||
.route("/api/fs/read", get(crate::fs_api::read_file))
|
||||
.route("/api/fs/write", post(crate::fs_api::write_file))
|
||||
.route("/api/fs/delete", delete(crate::fs_api::delete_file))
|
||||
.route("/api/fs/move", post(crate::fs_api::move_file))
|
||||
.route("/api/fs/copy", post(crate::fs_api::copy_file))
|
||||
.route("/api/fs/search", get(crate::fs_api::search_files))
|
||||
.route("/api/mkdir", post(create_directory))
|
||||
.route("/api/cleanup-exited", post(cleanup_exited))
|
||||
.layer(middleware::from_fn_with_state(
|
||||
|
|
@ -456,7 +470,7 @@ async fn terminal_stream(
|
|||
|
||||
// Poll for terminal output
|
||||
let mut poll_interval = interval(Duration::from_millis(10));
|
||||
let mut exit_sent = false;
|
||||
let exit_sent = false;
|
||||
|
||||
loop {
|
||||
poll_interval.tick().await;
|
||||
|
|
@ -469,7 +483,7 @@ async fn terminal_stream(
|
|||
yield Ok(Event::default()
|
||||
.event("data")
|
||||
.data(exit_event));
|
||||
exit_sent = true;
|
||||
let _ = exit_sent; // Prevent duplicate exit events
|
||||
break;
|
||||
}
|
||||
|
||||
|
|
@ -495,7 +509,6 @@ async fn terminal_stream(
|
|||
yield Ok(Event::default()
|
||||
.event("data")
|
||||
.data(exit_event));
|
||||
exit_sent = true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
|
@ -506,6 +519,26 @@ async fn terminal_stream(
|
|||
Ok(Sse::new(stream).keep_alive(KeepAlive::default()))
|
||||
}
|
||||
|
||||
// Session monitoring SSE endpoint
/// Streams session lifecycle events (created/updated/closed/activity) to
/// the client as Server-Sent Events, preceded by an initial snapshot of
/// the current sessions emitted by `create_sse_stream`.
async fn session_events_stream(
    AxumState(state): AxumState<AppState>,
) -> Result<Sse<impl Stream<Item = Result<Event, Infallible>>>, StatusCode> {
    // Clone the session monitor Arc to avoid lifetime issues
    let session_monitor = state.session_monitor.clone();

    // Start monitoring if not already started
    // NOTE(review): start_monitoring is invoked on every SSE connection and
    // appears to spawn a new polling task each time — confirm whether
    // SessionMonitor deduplicates this internally.
    session_monitor.start_monitoring().await;

    // Create SSE stream from session monitor
    let stream = session_monitor.create_sse_stream()
        .map(|data| {
            // Inner error type is Infallible, so the map_err arm can never run.
            data.map(|json| Event::default().data(json))
                .map_err(|_| unreachable!())
        });

    Ok(Sse::new(stream).keep_alive(KeepAlive::default()))
}
|
||||
|
||||
// File system endpoints
|
||||
async fn browse_directory(
|
||||
Query(params): Query<BrowseQuery>,
|
||||
|
|
|
|||
267
tauri/src-tauri/src/session_monitor.rs
Normal file
267
tauri/src-tauri/src/session_monitor.rs
Normal file
|
|
@ -0,0 +1,267 @@
|
|||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::{RwLock, mpsc};
|
||||
use tokio::time::{interval, Duration};
|
||||
use chrono::Utc;
|
||||
use serde::{Serialize, Deserialize};
|
||||
use serde_json;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Information about a terminal session
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SessionInfo {
    // Unique session identifier.
    pub id: String,
    // Human-readable session name.
    pub name: String,
    // OS process id of the session's process.
    pub pid: u32,
    // Terminal dimensions (rows x cols).
    pub rows: u16,
    pub cols: u16,
    // Creation timestamp, as provided by the terminal manager.
    pub created_at: String,
    // RFC 3339 timestamp of the most recently observed activity.
    pub last_activity: String,
    // Whether the session is currently considered active.
    pub is_active: bool,
    // Number of connected clients (currently always set to 0 by the
    // monitor — see the TODO in start_monitoring).
    pub client_count: usize,
}
|
||||
|
||||
/// Session state change event
///
/// Serialized with an external `type` tag in snake_case, e.g.
/// `{"type": "session_created", "session": {...}}`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum SessionEvent {
    // A session appeared that was not previously tracked.
    SessionCreated {
        session: SessionInfo,
    },
    // A tracked session changed (the monitor emits this on resize).
    SessionUpdated {
        session: SessionInfo,
    },
    // A tracked session is no longer reported by the terminal manager.
    SessionClosed {
        id: String,
    },
    // Activity was reported for a session via notify_activity.
    SessionActivity {
        id: String,
        timestamp: String,
    },
}
|
||||
|
||||
/// Session monitoring service
///
/// Periodically polls the terminal manager for active sessions and
/// broadcasts lifecycle events to registered subscribers.
pub struct SessionMonitor {
    // Last observed snapshot of sessions, keyed by session id.
    sessions: Arc<RwLock<HashMap<String, SessionInfo>>>,
    // Event fan-out channels, keyed by a per-subscriber UUID.
    event_subscribers: Arc<RwLock<HashMap<String, mpsc::UnboundedSender<SessionEvent>>>>,
    // Source of truth for the current session list.
    terminal_manager: Arc<crate::terminal::TerminalManager>,
}
|
||||
|
||||
impl SessionMonitor {
    /// Create a monitor backed by the given terminal manager. No polling
    /// starts until `start_monitoring` is called.
    pub fn new(terminal_manager: Arc<crate::terminal::TerminalManager>) -> Self {
        Self {
            sessions: Arc::new(RwLock::new(HashMap::new())),
            event_subscribers: Arc::new(RwLock::new(HashMap::new())),
            terminal_manager,
        }
    }

    /// Start monitoring sessions
    ///
    /// Spawns a background task that polls the terminal manager every 5
    /// seconds, diffs the result against the last snapshot, and broadcasts
    /// Created/Updated/Closed events.
    ///
    /// NOTE(review): each call spawns a new polling task — there is no
    /// "already started" guard, so callers must ensure this runs once, or
    /// duplicate events will be emitted. TODO confirm intended usage.
    pub async fn start_monitoring(&self) {
        let sessions = self.sessions.clone();
        let subscribers = self.event_subscribers.clone();
        let terminal_manager = self.terminal_manager.clone();

        tokio::spawn(async move {
            let mut monitor_interval = interval(Duration::from_secs(5));

            loop {
                monitor_interval.tick().await;

                // Get current sessions from terminal manager
                let current_sessions = terminal_manager.list_sessions().await;
                // Write lock is held across the whole diff so readers see a
                // consistent snapshot.
                let mut sessions_map = sessions.write().await;
                let mut updated_sessions = HashMap::new();

                // Check for new or updated sessions
                for session in current_sessions {
                    let session_info = SessionInfo {
                        id: session.id.clone(),
                        name: session.name.clone(),
                        pid: session.pid,
                        rows: session.rows,
                        cols: session.cols,
                        created_at: session.created_at.clone(),
                        // Polling cannot observe real activity; each tick
                        // refreshes the timestamp.
                        last_activity: Utc::now().to_rfc3339(),
                        is_active: true,
                        client_count: 0, // TODO: Track actual client count
                    };

                    // Check if this is a new session
                    if !sessions_map.contains_key(&session.id) {
                        // Broadcast session created event
                        Self::broadcast_event(
                            &subscribers,
                            SessionEvent::SessionCreated {
                                session: session_info.clone()
                            }
                        ).await;
                    } else {
                        // Check if session was updated
                        // Only dimension changes (resize) count as an update.
                        if let Some(existing) = sessions_map.get(&session.id) {
                            if existing.rows != session_info.rows ||
                                existing.cols != session_info.cols {
                                // Broadcast session updated event
                                Self::broadcast_event(
                                    &subscribers,
                                    SessionEvent::SessionUpdated {
                                        session: session_info.clone()
                                    }
                                ).await;
                            }
                        }
                    }

                    updated_sessions.insert(session.id.clone(), session_info);
                }

                // Check for closed sessions
                // (present in the old snapshot but absent from the new one)
                let closed_sessions: Vec<String> = sessions_map.keys()
                    .filter(|id| !updated_sessions.contains_key(*id))
                    .cloned()
                    .collect();

                for session_id in closed_sessions {
                    // Broadcast session closed event
                    Self::broadcast_event(
                        &subscribers,
                        SessionEvent::SessionClosed {
                            id: session_id.clone()
                        }
                    ).await;
                }

                // Update the sessions map
                *sessions_map = updated_sessions;
            }
        });
    }

    /// Subscribe to session events
    ///
    /// NOTE(review): the generated subscriber id is not returned to the
    /// caller, so `unsubscribe` cannot be used with this method; dead
    /// subscribers are only reaped lazily in `broadcast_event` when a send
    /// fails. TODO confirm whether the id should be returned.
    pub async fn subscribe(&self) -> mpsc::UnboundedReceiver<SessionEvent> {
        let (tx, rx) = mpsc::unbounded_channel();
        let subscriber_id = Uuid::new_v4().to_string();

        self.event_subscribers.write().await.insert(subscriber_id, tx);

        rx
    }

    /// Unsubscribe from session events
    pub async fn unsubscribe(&self, subscriber_id: &str) {
        self.event_subscribers.write().await.remove(subscriber_id);
    }

    /// Get current session count
    pub async fn get_session_count(&self) -> usize {
        self.sessions.read().await.len()
    }

    /// Get all sessions
    pub async fn get_sessions(&self) -> Vec<SessionInfo> {
        self.sessions.read().await.values().cloned().collect()
    }

    /// Get a specific session
    pub async fn get_session(&self, id: &str) -> Option<SessionInfo> {
        self.sessions.read().await.get(id).cloned()
    }

    /// Notify activity for a session
    ///
    /// Refreshes the session's `last_activity` timestamp and broadcasts a
    /// `SessionActivity` event. No-op for unknown session ids.
    pub async fn notify_activity(&self, session_id: &str) {
        if let Some(session) = self.sessions.write().await.get_mut(session_id) {
            session.last_activity = Utc::now().to_rfc3339();

            // Broadcast activity event
            Self::broadcast_event(
                &self.event_subscribers,
                SessionEvent::SessionActivity {
                    id: session_id.to_string(),
                    timestamp: session.last_activity.clone(),
                }
            ).await;
        }
    }

    /// Broadcast an event to all subscribers
    ///
    /// Subscribers whose receiving end has been dropped (send fails) are
    /// collected and removed under a write lock afterwards.
    async fn broadcast_event(
        subscribers: &Arc<RwLock<HashMap<String, mpsc::UnboundedSender<SessionEvent>>>>,
        event: SessionEvent,
    ) {
        let subscribers_read = subscribers.read().await;
        let mut dead_subscribers = Vec::new();

        for (id, tx) in subscribers_read.iter() {
            if tx.send(event.clone()).is_err() {
                dead_subscribers.push(id.clone());
            }
        }

        // Remove dead subscribers
        if !dead_subscribers.is_empty() {
            // Read guard must be released before taking the write lock,
            // otherwise this would deadlock.
            drop(subscribers_read);
            let mut subscribers_write = subscribers.write().await;
            for id in dead_subscribers {
                subscribers_write.remove(&id);
            }
        }
    }

    /// Create an SSE stream for session events
    ///
    /// Registers an internal subscriber, emits an initial snapshot of all
    /// known sessions, then forwards events as pre-formatted SSE `data:`
    /// frames until the channel closes.
    ///
    /// NOTE(review): the final cleanup line runs only when the sender side
    /// closes the channel; if the client disconnects and the stream is
    /// dropped mid-`recv`, the subscriber entry lingers until
    /// `broadcast_event` reaps it on the next failed send. TODO confirm
    /// this is acceptable.
    pub fn create_sse_stream(self: Arc<Self>) -> impl futures::Stream<Item = Result<String, std::convert::Infallible>> + Send + 'static {
        async_stream::stream! {
            // Subscribe to events
            let (tx, mut rx) = mpsc::unbounded_channel();
            let subscriber_id = Uuid::new_v4().to_string();
            self.event_subscribers.write().await.insert(subscriber_id.clone(), tx);

            // Send initial sessions
            let session_list = self.sessions.read().await.values().cloned().collect::<Vec<_>>();
            let initial_event = serde_json::json!({
                "type": "initial",
                "sessions": session_list,
                "count": session_list.len()
            });

            yield Ok(format!("data: {}\n\n", initial_event));

            // Send events as they come
            while let Some(event) = rx.recv().await {
                if let Ok(json) = serde_json::to_string(&event) {
                    yield Ok(format!("data: {}\n\n", json));
                }
            }

            // Clean up subscriber on drop
            self.event_subscribers.write().await.remove(&subscriber_id);
        }
    }
}
|
||||
|
||||
/// Session statistics
///
/// Point-in-time summary produced by `SessionMonitor::get_stats`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SessionStats {
    // Total number of tracked sessions.
    pub total_sessions: usize,
    // Sessions currently flagged is_active.
    pub active_sessions: usize,
    // Sum of client_count over all sessions.
    pub total_clients: usize,
    // Currently always 0 — see the TODO in get_stats.
    pub uptime_seconds: u64,
    // Currently always 0 — see the TODO in get_stats.
    pub sessions_created_today: usize,
}
|
||||
|
||||
impl SessionMonitor {
|
||||
/// Get session statistics
|
||||
pub async fn get_stats(&self) -> SessionStats {
|
||||
let sessions = self.sessions.read().await;
|
||||
let active_sessions = sessions.values().filter(|s| s.is_active).count();
|
||||
let total_clients = sessions.values().map(|s| s.client_count).sum();
|
||||
|
||||
// TODO: Track more detailed statistics
|
||||
SessionStats {
|
||||
total_sessions: sessions.len(),
|
||||
active_sessions,
|
||||
total_clients,
|
||||
uptime_seconds: 0, // TODO: Track uptime
|
||||
sessions_created_today: 0, // TODO: Track daily stats
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -3,6 +3,7 @@ use std::path::PathBuf;
|
|||
use directories::ProjectDirs;
|
||||
use tauri::{Manager, State};
|
||||
use crate::state::AppState;
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct GeneralSettings {
|
||||
|
|
@ -10,6 +11,10 @@ pub struct GeneralSettings {
|
|||
pub show_dock_icon: bool,
|
||||
pub default_terminal: String,
|
||||
pub default_shell: String,
|
||||
pub show_welcome_on_startup: Option<bool>,
|
||||
pub theme: Option<String>,
|
||||
pub language: Option<String>,
|
||||
pub check_updates_automatically: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
|
|
@ -19,6 +24,10 @@ pub struct DashboardSettings {
|
|||
pub password: String,
|
||||
pub access_mode: String,
|
||||
pub auto_cleanup: bool,
|
||||
pub session_limit: Option<u32>,
|
||||
pub idle_timeout_minutes: Option<u32>,
|
||||
pub enable_cors: Option<bool>,
|
||||
pub allowed_origins: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
|
|
@ -28,6 +37,135 @@ pub struct AdvancedSettings {
|
|||
pub log_level: String,
|
||||
pub session_timeout: u32,
|
||||
pub ngrok_auth_token: Option<String>,
|
||||
pub ngrok_region: Option<String>,
|
||||
pub ngrok_subdomain: Option<String>,
|
||||
pub enable_telemetry: Option<bool>,
|
||||
pub experimental_features: Option<bool>,
|
||||
}
|
||||
|
||||
/// Terminal recording (cast) preferences.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct RecordingSettings {
    // Master switch for session recording.
    pub enabled: bool,
    // Destination directory; None presumably means an app-chosen default — TODO confirm.
    pub output_directory: Option<String>,
    // Recording file format (default is "asciinema").
    pub format: String,
    // Whether timing information is embedded in recordings.
    pub include_timing: bool,
    // Whether output files are compressed.
    pub compress_output: bool,
    // Optional size cap per recording file, in megabytes.
    pub max_file_size_mb: Option<u32>,
    // Whether recordings are saved automatically.
    pub auto_save: bool,
    // strftime-style template for generated filenames.
    pub filename_template: Option<String>,
}
|
||||
|
||||
/// TTY forwarding configuration.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct TTYForwardSettings {
    // Master switch for TTY forwarding.
    pub enabled: bool,
    // Port the forwarder listens on (default 8022).
    pub default_port: u16,
    // Interface to bind (default "127.0.0.1").
    pub bind_address: String,
    // Maximum simultaneous connections.
    pub max_connections: u32,
    // Per-connection buffer size in bytes.
    pub buffer_size: u32,
    // Whether TCP keep-alive is used.
    pub keep_alive: bool,
    // Authentication scheme identifier; None disables it — TODO confirm semantics.
    pub authentication: Option<String>,
}
|
||||
|
||||
/// Resource-monitoring preferences.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct MonitoringSettings {
    // Master switch for monitoring.
    pub enabled: bool,
    // Whether metrics are collected at all.
    pub collect_metrics: bool,
    // Sampling interval for metrics, in seconds.
    pub metric_interval_seconds: u32,
    // Maximum number of retained metric samples.
    pub max_history_size: u32,
    // Alert toggles for CPU and memory pressure.
    pub alert_on_high_cpu: bool,
    pub alert_on_high_memory: bool,
    // Alert thresholds as percentages (defaults are 80/80).
    pub cpu_threshold_percent: Option<u8>,
    pub memory_threshold_percent: Option<u8>,
}
|
||||
|
||||
/// Network behavior preferences.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct NetworkSettings {
    // Network interface to prefer; None lets the OS decide — TODO confirm.
    pub preferred_interface: Option<String>,
    // Whether IPv6 is enabled.
    pub enable_ipv6: bool,
    // Override DNS servers; None uses the system resolver — TODO confirm.
    pub dns_servers: Option<Vec<String>>,
    // Optional proxy configuration (see ProxySettings).
    pub proxy_settings: Option<ProxySettings>,
    // Connection timeout in seconds (default 30).
    pub connection_timeout_seconds: u32,
    // Number of retry attempts on failure (default 3).
    pub retry_attempts: u32,
}
|
||||
|
||||
/// Proxy configuration used by [`NetworkSettings`].
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ProxySettings {
    // Whether the proxy is active.
    pub enabled: bool,
    // Proxy protocol identifier (e.g. http/socks) — TODO confirm accepted values.
    pub proxy_type: String,
    // Proxy endpoint.
    pub host: String,
    pub port: u16,
    // Optional proxy credentials.
    pub username: Option<String>,
    pub password: Option<String>,
    // Hosts that bypass the proxy.
    pub bypass_list: Option<Vec<String>>,
}
|
||||
|
||||
/// Port selection and conflict-handling preferences.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct PortSettings {
    // Whether port conflicts are resolved automatically.
    pub auto_resolve_conflicts: bool,
    // Inclusive preferred range for auto-selected ports (default 4000-5000).
    pub preferred_port_range_start: u16,
    pub preferred_port_range_end: u16,
    // Ports never to use, even inside the preferred range.
    pub excluded_ports: Option<Vec<u16>>,
    // Strategy name for conflict resolution (default "increment").
    pub conflict_resolution_strategy: String,
}
|
||||
|
||||
/// In-app / system notification preferences.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct NotificationSettings {
    // Master switch for notifications.
    pub enabled: bool,
    // Whether notifications also surface via the OS notification center.
    pub show_in_system: bool,
    // Whether a sound accompanies notifications.
    pub play_sound: bool,
    // Per-category toggles, keyed by category name
    // (defaults include "info", "success", "warning", "error",
    // "server_status", "update_available").
    pub notification_types: HashMap<String, bool>,
    // Do-not-disturb window; start/end are time strings — TODO confirm format.
    pub do_not_disturb_enabled: Option<bool>,
    pub do_not_disturb_start: Option<String>,
    pub do_not_disturb_end: Option<String>,
}
|
||||
|
||||
/// Which external terminal emulators are integrated and how.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct TerminalIntegrationSettings {
    // Enable/disable flag per terminal app name
    // (defaults cover Terminal, iTerm2, Hyper, Alacritty, Warp, Ghostty, WezTerm).
    pub enabled_terminals: HashMap<String, bool>,
    // Per-terminal launch configuration, keyed by terminal name.
    pub terminal_configs: HashMap<String, TerminalConfig>,
    // Overrides the default terminal choice when set.
    pub default_terminal_override: Option<String>,
}
|
||||
|
||||
/// Launch configuration for a single terminal emulator.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct TerminalConfig {
    // Explicit executable path; None presumably means "find it" — TODO confirm.
    pub path: Option<String>,
    // Extra command-line arguments.
    pub args: Option<Vec<String>>,
    // Extra environment variables for the launched process.
    pub env: Option<HashMap<String, String>>,
    // Working directory for the launched process.
    pub working_directory: Option<String>,
}
|
||||
|
||||
/// Application update preferences.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct UpdateSettings {
    // Release channel (default "stable").
    pub channel: String,
    // How often to check for updates (default "weekly").
    pub check_frequency: String,
    // Whether updates download without prompting.
    pub auto_download: bool,
    // Whether downloaded updates install without prompting.
    pub auto_install: bool,
    // Whether release notes are shown after updates.
    pub show_release_notes: bool,
    // Whether pre-release builds are offered.
    pub include_pre_releases: bool,
}
|
||||
|
||||
/// Security-related preferences.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct SecuritySettings {
    // Whether traffic/data encryption is enabled.
    pub enable_encryption: bool,
    // Algorithm identifier (default "aes-256-gcm").
    pub encryption_algorithm: Option<String>,
    // Whether clients must authenticate.
    pub require_authentication: bool,
    // Session token lifetime in hours (default 24).
    pub session_token_expiry_hours: Option<u32>,
    // IP allow/deny lists; None means no restriction — TODO confirm.
    pub allowed_ip_addresses: Option<Vec<String>>,
    pub blocked_ip_addresses: Option<Vec<String>>,
    // Rate limiting switch and per-minute budget (default 60).
    pub rate_limiting_enabled: bool,
    pub rate_limit_requests_per_minute: Option<u32>,
}
|
||||
|
||||
/// Debugging and developer-tool preferences.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct DebugSettings {
    // Whether the in-app debug menu is shown.
    pub enable_debug_menu: bool,
    // Whether performance statistics are displayed.
    pub show_performance_stats: bool,
    // Whether verbose logging is enabled.
    pub enable_verbose_logging: bool,
    // File logging switch, destination, and size cap (default 50 MB).
    pub log_to_file: bool,
    pub log_file_path: Option<String>,
    pub max_log_file_size_mb: Option<u32>,
    // Whether webview developer tools are enabled.
    pub enable_dev_tools: bool,
    // Whether internal errors are surfaced to the user.
    pub show_internal_errors: bool,
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
|
|
@ -35,16 +173,47 @@ pub struct Settings {
|
|||
pub general: GeneralSettings,
|
||||
pub dashboard: DashboardSettings,
|
||||
pub advanced: AdvancedSettings,
|
||||
pub recording: Option<RecordingSettings>,
|
||||
pub tty_forward: Option<TTYForwardSettings>,
|
||||
pub monitoring: Option<MonitoringSettings>,
|
||||
pub network: Option<NetworkSettings>,
|
||||
pub port: Option<PortSettings>,
|
||||
pub notifications: Option<NotificationSettings>,
|
||||
pub terminal_integrations: Option<TerminalIntegrationSettings>,
|
||||
pub updates: Option<UpdateSettings>,
|
||||
pub security: Option<SecuritySettings>,
|
||||
pub debug: Option<DebugSettings>,
|
||||
}
|
||||
|
||||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
let mut default_notification_types = HashMap::new();
|
||||
default_notification_types.insert("info".to_string(), true);
|
||||
default_notification_types.insert("success".to_string(), true);
|
||||
default_notification_types.insert("warning".to_string(), true);
|
||||
default_notification_types.insert("error".to_string(), true);
|
||||
default_notification_types.insert("server_status".to_string(), true);
|
||||
default_notification_types.insert("update_available".to_string(), true);
|
||||
|
||||
let mut enabled_terminals = HashMap::new();
|
||||
enabled_terminals.insert("Terminal".to_string(), true);
|
||||
enabled_terminals.insert("iTerm2".to_string(), true);
|
||||
enabled_terminals.insert("Hyper".to_string(), true);
|
||||
enabled_terminals.insert("Alacritty".to_string(), true);
|
||||
enabled_terminals.insert("Warp".to_string(), true);
|
||||
enabled_terminals.insert("Ghostty".to_string(), false);
|
||||
enabled_terminals.insert("WezTerm".to_string(), false);
|
||||
|
||||
Self {
|
||||
general: GeneralSettings {
|
||||
launch_at_login: false,
|
||||
show_dock_icon: true,
|
||||
default_terminal: "system".to_string(),
|
||||
default_shell: "default".to_string(),
|
||||
show_welcome_on_startup: Some(true),
|
||||
theme: Some("auto".to_string()),
|
||||
language: Some("en".to_string()),
|
||||
check_updates_automatically: Some(true),
|
||||
},
|
||||
dashboard: DashboardSettings {
|
||||
server_port: 4020,
|
||||
|
|
@ -52,6 +221,10 @@ impl Default for Settings {
|
|||
password: String::new(),
|
||||
access_mode: "localhost".to_string(),
|
||||
auto_cleanup: true,
|
||||
session_limit: Some(10),
|
||||
idle_timeout_minutes: Some(30),
|
||||
enable_cors: Some(true),
|
||||
allowed_origins: Some(vec!["*".to_string()]),
|
||||
},
|
||||
advanced: AdvancedSettings {
|
||||
server_mode: "rust".to_string(),
|
||||
|
|
@ -59,7 +232,97 @@ impl Default for Settings {
|
|||
log_level: "info".to_string(),
|
||||
session_timeout: 0,
|
||||
ngrok_auth_token: None,
|
||||
ngrok_region: Some("us".to_string()),
|
||||
ngrok_subdomain: None,
|
||||
enable_telemetry: Some(false),
|
||||
experimental_features: Some(false),
|
||||
},
|
||||
recording: Some(RecordingSettings {
|
||||
enabled: true,
|
||||
output_directory: None,
|
||||
format: "asciinema".to_string(),
|
||||
include_timing: true,
|
||||
compress_output: false,
|
||||
max_file_size_mb: Some(100),
|
||||
auto_save: false,
|
||||
filename_template: Some("vibetunnel_%Y%m%d_%H%M%S".to_string()),
|
||||
}),
|
||||
tty_forward: Some(TTYForwardSettings {
|
||||
enabled: false,
|
||||
default_port: 8022,
|
||||
bind_address: "127.0.0.1".to_string(),
|
||||
max_connections: 5,
|
||||
buffer_size: 4096,
|
||||
keep_alive: true,
|
||||
authentication: None,
|
||||
}),
|
||||
monitoring: Some(MonitoringSettings {
|
||||
enabled: true,
|
||||
collect_metrics: true,
|
||||
metric_interval_seconds: 5,
|
||||
max_history_size: 1000,
|
||||
alert_on_high_cpu: false,
|
||||
alert_on_high_memory: false,
|
||||
cpu_threshold_percent: Some(80),
|
||||
memory_threshold_percent: Some(80),
|
||||
}),
|
||||
network: Some(NetworkSettings {
|
||||
preferred_interface: None,
|
||||
enable_ipv6: true,
|
||||
dns_servers: None,
|
||||
proxy_settings: None,
|
||||
connection_timeout_seconds: 30,
|
||||
retry_attempts: 3,
|
||||
}),
|
||||
port: Some(PortSettings {
|
||||
auto_resolve_conflicts: true,
|
||||
preferred_port_range_start: 4000,
|
||||
preferred_port_range_end: 5000,
|
||||
excluded_ports: None,
|
||||
conflict_resolution_strategy: "increment".to_string(),
|
||||
}),
|
||||
notifications: Some(NotificationSettings {
|
||||
enabled: true,
|
||||
show_in_system: true,
|
||||
play_sound: true,
|
||||
notification_types: default_notification_types,
|
||||
do_not_disturb_enabled: Some(false),
|
||||
do_not_disturb_start: None,
|
||||
do_not_disturb_end: None,
|
||||
}),
|
||||
terminal_integrations: Some(TerminalIntegrationSettings {
|
||||
enabled_terminals,
|
||||
terminal_configs: HashMap::new(),
|
||||
default_terminal_override: None,
|
||||
}),
|
||||
updates: Some(UpdateSettings {
|
||||
channel: "stable".to_string(),
|
||||
check_frequency: "weekly".to_string(),
|
||||
auto_download: false,
|
||||
auto_install: false,
|
||||
show_release_notes: true,
|
||||
include_pre_releases: false,
|
||||
}),
|
||||
security: Some(SecuritySettings {
|
||||
enable_encryption: false,
|
||||
encryption_algorithm: Some("aes-256-gcm".to_string()),
|
||||
require_authentication: false,
|
||||
session_token_expiry_hours: Some(24),
|
||||
allowed_ip_addresses: None,
|
||||
blocked_ip_addresses: None,
|
||||
rate_limiting_enabled: false,
|
||||
rate_limit_requests_per_minute: Some(60),
|
||||
}),
|
||||
debug: Some(DebugSettings {
|
||||
enable_debug_menu: false,
|
||||
show_performance_stats: false,
|
||||
enable_verbose_logging: false,
|
||||
log_to_file: false,
|
||||
log_file_path: None,
|
||||
max_log_file_size_mb: Some(50),
|
||||
enable_dev_tools: false,
|
||||
show_internal_errors: false,
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,6 +4,19 @@ use std::sync::atomic::AtomicBool;
|
|||
use crate::terminal::TerminalManager;
|
||||
use crate::server::HttpServer;
|
||||
use crate::ngrok::NgrokManager;
|
||||
use crate::cast::CastManager;
|
||||
use crate::tty_forward::TTYForwardManager;
|
||||
use crate::session_monitor::SessionMonitor;
|
||||
use crate::notification_manager::NotificationManager;
|
||||
use crate::welcome::WelcomeManager;
|
||||
use crate::permissions::PermissionsManager;
|
||||
use crate::updater::UpdateManager;
|
||||
use crate::backend_manager::BackendManager;
|
||||
use crate::debug_features::DebugFeaturesManager;
|
||||
use crate::api_testing::APITestingManager;
|
||||
use crate::auth_cache::AuthCacheManager;
|
||||
use crate::terminal_integrations::TerminalIntegrationsManager;
|
||||
use crate::terminal_spawn_service::TerminalSpawnService;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AppState {
|
||||
|
|
@ -12,16 +25,78 @@ pub struct AppState {
|
|||
pub ngrok_manager: Arc<NgrokManager>,
|
||||
pub server_monitoring: Arc<AtomicBool>,
|
||||
pub server_target_port: Arc<RwLock<Option<u16>>>,
|
||||
pub cast_manager: Arc<CastManager>,
|
||||
pub tty_forward_manager: Arc<TTYForwardManager>,
|
||||
pub session_monitor: Arc<SessionMonitor>,
|
||||
pub notification_manager: Arc<NotificationManager>,
|
||||
pub welcome_manager: Arc<WelcomeManager>,
|
||||
pub permissions_manager: Arc<PermissionsManager>,
|
||||
pub update_manager: Arc<UpdateManager>,
|
||||
pub backend_manager: Arc<BackendManager>,
|
||||
pub debug_features_manager: Arc<DebugFeaturesManager>,
|
||||
pub api_testing_manager: Arc<APITestingManager>,
|
||||
pub auth_cache_manager: Arc<AuthCacheManager>,
|
||||
pub terminal_integrations_manager: Arc<TerminalIntegrationsManager>,
|
||||
pub terminal_spawn_service: Arc<TerminalSpawnService>,
|
||||
}
|
||||
|
||||
impl AppState {
|
||||
pub fn new() -> Self {
|
||||
let mut terminal_manager = TerminalManager::new();
|
||||
let cast_manager = Arc::new(CastManager::new());
|
||||
|
||||
// Connect terminal manager to cast manager
|
||||
terminal_manager.set_cast_manager(cast_manager.clone());
|
||||
|
||||
let terminal_manager = Arc::new(terminal_manager);
|
||||
let session_monitor = Arc::new(SessionMonitor::new(terminal_manager.clone()));
|
||||
let notification_manager = Arc::new(NotificationManager::new());
|
||||
let mut permissions_manager = PermissionsManager::new();
|
||||
permissions_manager.set_notification_manager(notification_manager.clone());
|
||||
|
||||
let current_version = env!("CARGO_PKG_VERSION").to_string();
|
||||
let mut update_manager = UpdateManager::new(current_version);
|
||||
update_manager.set_notification_manager(notification_manager.clone());
|
||||
|
||||
let mut backend_manager = BackendManager::new();
|
||||
backend_manager.set_notification_manager(notification_manager.clone());
|
||||
|
||||
let mut debug_features_manager = DebugFeaturesManager::new();
|
||||
debug_features_manager.set_notification_manager(notification_manager.clone());
|
||||
|
||||
let mut api_testing_manager = APITestingManager::new();
|
||||
api_testing_manager.set_notification_manager(notification_manager.clone());
|
||||
|
||||
let mut auth_cache_manager = AuthCacheManager::new();
|
||||
auth_cache_manager.set_notification_manager(notification_manager.clone());
|
||||
|
||||
let mut terminal_integrations_manager = TerminalIntegrationsManager::new();
|
||||
terminal_integrations_manager.set_notification_manager(notification_manager.clone());
|
||||
|
||||
let terminal_integrations_manager = Arc::new(terminal_integrations_manager);
|
||||
let terminal_spawn_service = Arc::new(TerminalSpawnService::new(
|
||||
terminal_integrations_manager.clone()
|
||||
));
|
||||
|
||||
Self {
|
||||
terminal_manager: Arc::new(TerminalManager::new()),
|
||||
terminal_manager,
|
||||
http_server: Arc::new(RwLock::new(None)),
|
||||
ngrok_manager: Arc::new(NgrokManager::new()),
|
||||
server_monitoring: Arc::new(AtomicBool::new(true)),
|
||||
server_target_port: Arc::new(RwLock::new(None)),
|
||||
cast_manager,
|
||||
tty_forward_manager: Arc::new(TTYForwardManager::new()),
|
||||
session_monitor,
|
||||
notification_manager,
|
||||
welcome_manager: Arc::new(WelcomeManager::new()),
|
||||
permissions_manager: Arc::new(permissions_manager),
|
||||
update_manager: Arc::new(update_manager),
|
||||
backend_manager: Arc::new(backend_manager),
|
||||
debug_features_manager: Arc::new(debug_features_manager),
|
||||
api_testing_manager: Arc::new(api_testing_manager),
|
||||
auth_cache_manager: Arc::new(auth_cache_manager),
|
||||
terminal_integrations_manager,
|
||||
terminal_spawn_service,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -7,10 +7,12 @@ use bytes::Bytes;
|
|||
use uuid::Uuid;
|
||||
use chrono::Utc;
|
||||
use tracing::{info, error, debug};
|
||||
use crate::cast::CastManager;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct TerminalManager {
|
||||
sessions: Arc<RwLock<HashMap<String, Arc<RwLock<TerminalSession>>>>>,
|
||||
cast_manager: Option<Arc<CastManager>>,
|
||||
}
|
||||
|
||||
pub struct TerminalSession {
|
||||
|
|
@ -33,9 +35,14 @@ impl TerminalManager {
|
|||
pub fn new() -> Self {
|
||||
Self {
|
||||
sessions: Arc::new(RwLock::new(HashMap::new())),
|
||||
cast_manager: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_cast_manager(&mut self, cast_manager: Arc<CastManager>) {
|
||||
self.cast_manager = Some(cast_manager);
|
||||
}
|
||||
|
||||
pub async fn create_session(
|
||||
&self,
|
||||
name: String,
|
||||
|
|
@ -107,6 +114,8 @@ impl TerminalManager {
|
|||
|
||||
// Start reader thread
|
||||
let output_tx_clone = output_tx.clone();
|
||||
let cast_manager_clone = self.cast_manager.clone();
|
||||
let session_id_clone = id.clone();
|
||||
let reader_thread = std::thread::spawn(move || {
|
||||
let mut reader = reader;
|
||||
let mut buffer = [0u8; 4096];
|
||||
|
|
@ -119,6 +128,17 @@ impl TerminalManager {
|
|||
}
|
||||
Ok(n) => {
|
||||
let data = Bytes::copy_from_slice(&buffer[..n]);
|
||||
|
||||
// Record output to cast file if recording
|
||||
if let Some(cast_manager) = &cast_manager_clone {
|
||||
let cm = cast_manager.clone();
|
||||
let sid = session_id_clone.clone();
|
||||
let data_clone = data.clone();
|
||||
tokio::spawn(async move {
|
||||
let _ = cm.add_output(&sid, &data_clone).await;
|
||||
});
|
||||
}
|
||||
|
||||
if output_tx_clone.send(data).is_err() {
|
||||
debug!("Output channel closed");
|
||||
break;
|
||||
|
|
@ -201,6 +221,11 @@ impl TerminalManager {
|
|||
let mut sessions = self.sessions.write().await;
|
||||
|
||||
if let Some(session_arc) = sessions.remove(id) {
|
||||
// Stop recording if active
|
||||
if let Some(cast_manager) = &self.cast_manager {
|
||||
let _ = cast_manager.remove_recorder(id).await;
|
||||
}
|
||||
|
||||
// Session will be dropped when it goes out of scope
|
||||
drop(session_arc);
|
||||
|
||||
|
|
@ -228,6 +253,14 @@ impl TerminalManager {
|
|||
session.rows = rows;
|
||||
session.cols = cols;
|
||||
|
||||
// Update recorder dimensions if recording
|
||||
if let Some(cast_manager) = &self.cast_manager {
|
||||
if let Some(recorder) = cast_manager.get_recorder(id).await {
|
||||
let mut rec = recorder.lock().await;
|
||||
rec.resize(cols, rows).await;
|
||||
}
|
||||
}
|
||||
|
||||
debug!("Resized terminal {} to {}x{}", id, cols, rows);
|
||||
Ok(())
|
||||
} else {
|
||||
|
|
@ -239,6 +272,11 @@ impl TerminalManager {
|
|||
if let Some(session_arc) = self.get_session(id).await {
|
||||
let mut session = session_arc.write().await;
|
||||
|
||||
// Record input to cast file if recording
|
||||
if let Some(cast_manager) = &self.cast_manager {
|
||||
let _ = cast_manager.add_input(id, data).await;
|
||||
}
|
||||
|
||||
session.writer
|
||||
.write_all(data)
|
||||
.map_err(|e| format!("Failed to write to PTY: {}", e))?;
|
||||
|
|
|
|||
678
tauri/src-tauri/src/terminal_integrations.rs
Normal file
678
tauri/src-tauri/src/terminal_integrations.rs
Normal file
|
|
@ -0,0 +1,678 @@
|
|||
use serde::{Serialize, Deserialize};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
/// Terminal emulator type
|
||||
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
|
||||
pub enum TerminalEmulator {
|
||||
SystemDefault,
|
||||
Terminal, // macOS Terminal.app
|
||||
ITerm2, // iTerm2
|
||||
Hyper, // Hyper
|
||||
Alacritty, // Alacritty
|
||||
Kitty, // Kitty
|
||||
WezTerm, // WezTerm
|
||||
Ghostty, // Ghostty
|
||||
WindowsTerminal, // Windows Terminal
|
||||
ConEmu, // ConEmu
|
||||
Cmder, // Cmder
|
||||
Gnome, // GNOME Terminal
|
||||
Konsole, // KDE Konsole
|
||||
Xterm, // XTerm
|
||||
Custom, // Custom terminal
|
||||
}
|
||||
|
||||
impl TerminalEmulator {
|
||||
pub fn display_name(&self) -> &str {
|
||||
match self {
|
||||
TerminalEmulator::SystemDefault => "System Default",
|
||||
TerminalEmulator::Terminal => "Terminal",
|
||||
TerminalEmulator::ITerm2 => "iTerm2",
|
||||
TerminalEmulator::Hyper => "Hyper",
|
||||
TerminalEmulator::Alacritty => "Alacritty",
|
||||
TerminalEmulator::Kitty => "Kitty",
|
||||
TerminalEmulator::WezTerm => "WezTerm",
|
||||
TerminalEmulator::Ghostty => "Ghostty",
|
||||
TerminalEmulator::WindowsTerminal => "Windows Terminal",
|
||||
TerminalEmulator::ConEmu => "ConEmu",
|
||||
TerminalEmulator::Cmder => "Cmder",
|
||||
TerminalEmulator::Gnome => "GNOME Terminal",
|
||||
TerminalEmulator::Konsole => "Konsole",
|
||||
TerminalEmulator::Xterm => "XTerm",
|
||||
TerminalEmulator::Custom => "Custom",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Terminal integration configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TerminalConfig {
|
||||
pub emulator: TerminalEmulator,
|
||||
pub name: String,
|
||||
pub executable_path: PathBuf,
|
||||
pub args_template: Vec<String>,
|
||||
pub env_vars: HashMap<String, String>,
|
||||
pub features: TerminalFeatures,
|
||||
pub platform: Vec<String>, // ["macos", "windows", "linux"]
|
||||
}
|
||||
|
||||
/// Terminal features
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TerminalFeatures {
|
||||
pub supports_tabs: bool,
|
||||
pub supports_splits: bool,
|
||||
pub supports_profiles: bool,
|
||||
pub supports_themes: bool,
|
||||
pub supports_scripting: bool,
|
||||
pub supports_url_scheme: bool,
|
||||
pub supports_remote_control: bool,
|
||||
}
|
||||
|
||||
/// Terminal launch options
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TerminalLaunchOptions {
|
||||
pub working_directory: Option<PathBuf>,
|
||||
pub command: Option<String>,
|
||||
pub args: Vec<String>,
|
||||
pub env_vars: HashMap<String, String>,
|
||||
pub title: Option<String>,
|
||||
pub profile: Option<String>,
|
||||
pub tab: bool,
|
||||
pub split: Option<SplitDirection>,
|
||||
pub window_size: Option<(u32, u32)>,
|
||||
}
|
||||
|
||||
/// Split direction
|
||||
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
|
||||
pub enum SplitDirection {
|
||||
Horizontal,
|
||||
Vertical,
|
||||
}
|
||||
|
||||
/// Terminal integration info
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TerminalIntegrationInfo {
|
||||
pub emulator: TerminalEmulator,
|
||||
pub installed: bool,
|
||||
pub version: Option<String>,
|
||||
pub path: Option<PathBuf>,
|
||||
pub is_default: bool,
|
||||
pub config: Option<TerminalConfig>,
|
||||
}
|
||||
|
||||
/// Terminal URL scheme
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TerminalURLScheme {
|
||||
pub scheme: String,
|
||||
pub supports_ssh: bool,
|
||||
pub supports_local: bool,
|
||||
pub template: String,
|
||||
}
|
||||
|
||||
/// Terminal integrations manager
|
||||
pub struct TerminalIntegrationsManager {
|
||||
configs: Arc<RwLock<HashMap<TerminalEmulator, TerminalConfig>>>,
|
||||
detected_terminals: Arc<RwLock<HashMap<TerminalEmulator, TerminalIntegrationInfo>>>,
|
||||
default_terminal: Arc<RwLock<TerminalEmulator>>,
|
||||
url_schemes: Arc<RwLock<HashMap<TerminalEmulator, TerminalURLScheme>>>,
|
||||
notification_manager: Option<Arc<crate::notification_manager::NotificationManager>>,
|
||||
}
|
||||
|
||||
impl TerminalIntegrationsManager {
|
||||
/// Create a new terminal integrations manager
|
||||
pub fn new() -> Self {
|
||||
let manager = Self {
|
||||
configs: Arc::new(RwLock::new(HashMap::new())),
|
||||
detected_terminals: Arc::new(RwLock::new(HashMap::new())),
|
||||
default_terminal: Arc::new(RwLock::new(TerminalEmulator::SystemDefault)),
|
||||
url_schemes: Arc::new(RwLock::new(HashMap::new())),
|
||||
notification_manager: None,
|
||||
};
|
||||
|
||||
// Initialize default configurations
|
||||
tokio::spawn({
|
||||
let configs = manager.configs.clone();
|
||||
let url_schemes = manager.url_schemes.clone();
|
||||
async move {
|
||||
let default_configs = Self::initialize_default_configs();
|
||||
*configs.write().await = default_configs;
|
||||
|
||||
let default_schemes = Self::initialize_url_schemes();
|
||||
*url_schemes.write().await = default_schemes;
|
||||
}
|
||||
});
|
||||
|
||||
manager
|
||||
}
|
||||
|
||||
/// Set the notification manager
|
||||
pub fn set_notification_manager(&mut self, notification_manager: Arc<crate::notification_manager::NotificationManager>) {
|
||||
self.notification_manager = Some(notification_manager);
|
||||
}
|
||||
|
||||
/// Initialize default terminal configurations
|
||||
fn initialize_default_configs() -> HashMap<TerminalEmulator, TerminalConfig> {
|
||||
let mut configs = HashMap::new();
|
||||
|
||||
// WezTerm configuration
|
||||
configs.insert(TerminalEmulator::WezTerm, TerminalConfig {
|
||||
emulator: TerminalEmulator::WezTerm,
|
||||
name: "WezTerm".to_string(),
|
||||
executable_path: PathBuf::from("/Applications/WezTerm.app/Contents/MacOS/wezterm"),
|
||||
args_template: vec![
|
||||
"start".to_string(),
|
||||
"--cwd".to_string(),
|
||||
"{working_directory}".to_string(),
|
||||
"--".to_string(),
|
||||
"{command}".to_string(),
|
||||
"{args}".to_string(),
|
||||
],
|
||||
env_vars: HashMap::new(),
|
||||
features: TerminalFeatures {
|
||||
supports_tabs: true,
|
||||
supports_splits: true,
|
||||
supports_profiles: true,
|
||||
supports_themes: true,
|
||||
supports_scripting: true,
|
||||
supports_url_scheme: false,
|
||||
supports_remote_control: true,
|
||||
},
|
||||
platform: vec!["macos".to_string(), "windows".to_string(), "linux".to_string()],
|
||||
});
|
||||
|
||||
// Ghostty configuration
|
||||
configs.insert(TerminalEmulator::Ghostty, TerminalConfig {
|
||||
emulator: TerminalEmulator::Ghostty,
|
||||
name: "Ghostty".to_string(),
|
||||
executable_path: PathBuf::from("/Applications/Ghostty.app/Contents/MacOS/ghostty"),
|
||||
args_template: vec![
|
||||
"--working-directory".to_string(),
|
||||
"{working_directory}".to_string(),
|
||||
"--command".to_string(),
|
||||
"{command}".to_string(),
|
||||
"{args}".to_string(),
|
||||
],
|
||||
env_vars: HashMap::new(),
|
||||
features: TerminalFeatures {
|
||||
supports_tabs: true,
|
||||
supports_splits: true,
|
||||
supports_profiles: true,
|
||||
supports_themes: true,
|
||||
supports_scripting: false,
|
||||
supports_url_scheme: false,
|
||||
supports_remote_control: false,
|
||||
},
|
||||
platform: vec!["macos".to_string()],
|
||||
});
|
||||
|
||||
// iTerm2 configuration
|
||||
configs.insert(TerminalEmulator::ITerm2, TerminalConfig {
|
||||
emulator: TerminalEmulator::ITerm2,
|
||||
name: "iTerm2".to_string(),
|
||||
executable_path: PathBuf::from("/Applications/iTerm.app/Contents/MacOS/iTerm2"),
|
||||
args_template: vec![],
|
||||
env_vars: HashMap::new(),
|
||||
features: TerminalFeatures {
|
||||
supports_tabs: true,
|
||||
supports_splits: true,
|
||||
supports_profiles: true,
|
||||
supports_themes: true,
|
||||
supports_scripting: true,
|
||||
supports_url_scheme: true,
|
||||
supports_remote_control: true,
|
||||
},
|
||||
platform: vec!["macos".to_string()],
|
||||
});
|
||||
|
||||
// Alacritty configuration
|
||||
configs.insert(TerminalEmulator::Alacritty, TerminalConfig {
|
||||
emulator: TerminalEmulator::Alacritty,
|
||||
name: "Alacritty".to_string(),
|
||||
executable_path: PathBuf::from("/Applications/Alacritty.app/Contents/MacOS/alacritty"),
|
||||
args_template: vec![
|
||||
"--working-directory".to_string(),
|
||||
"{working_directory}".to_string(),
|
||||
"-e".to_string(),
|
||||
"{command}".to_string(),
|
||||
"{args}".to_string(),
|
||||
],
|
||||
env_vars: HashMap::new(),
|
||||
features: TerminalFeatures {
|
||||
supports_tabs: false,
|
||||
supports_splits: false,
|
||||
supports_profiles: true,
|
||||
supports_themes: true,
|
||||
supports_scripting: false,
|
||||
supports_url_scheme: false,
|
||||
supports_remote_control: false,
|
||||
},
|
||||
platform: vec!["macos".to_string(), "windows".to_string(), "linux".to_string()],
|
||||
});
|
||||
|
||||
// Kitty configuration
|
||||
configs.insert(TerminalEmulator::Kitty, TerminalConfig {
|
||||
emulator: TerminalEmulator::Kitty,
|
||||
name: "Kitty".to_string(),
|
||||
executable_path: PathBuf::from("/Applications/kitty.app/Contents/MacOS/kitty"),
|
||||
args_template: vec![
|
||||
"--directory".to_string(),
|
||||
"{working_directory}".to_string(),
|
||||
"{command}".to_string(),
|
||||
"{args}".to_string(),
|
||||
],
|
||||
env_vars: HashMap::new(),
|
||||
features: TerminalFeatures {
|
||||
supports_tabs: true,
|
||||
supports_splits: true,
|
||||
supports_profiles: true,
|
||||
supports_themes: true,
|
||||
supports_scripting: true,
|
||||
supports_url_scheme: false,
|
||||
supports_remote_control: true,
|
||||
},
|
||||
platform: vec!["macos".to_string(), "linux".to_string()],
|
||||
});
|
||||
|
||||
configs
|
||||
}
|
||||
|
||||
/// Initialize URL schemes
|
||||
fn initialize_url_schemes() -> HashMap<TerminalEmulator, TerminalURLScheme> {
|
||||
let mut schemes = HashMap::new();
|
||||
|
||||
schemes.insert(TerminalEmulator::ITerm2, TerminalURLScheme {
|
||||
scheme: "iterm2".to_string(),
|
||||
supports_ssh: true,
|
||||
supports_local: true,
|
||||
template: "iterm2://ssh/{user}@{host}:{port}".to_string(),
|
||||
});
|
||||
|
||||
schemes
|
||||
}
|
||||
|
||||
/// Detect installed terminals
|
||||
pub async fn detect_terminals(&self) -> Vec<TerminalIntegrationInfo> {
|
||||
let mut detected = Vec::new();
|
||||
let configs = self.configs.read().await;
|
||||
|
||||
for (emulator, config) in configs.iter() {
|
||||
let info = self.check_terminal_installation(emulator, config).await;
|
||||
if info.installed {
|
||||
detected.push(info.clone());
|
||||
self.detected_terminals.write().await.insert(*emulator, info);
|
||||
}
|
||||
}
|
||||
|
||||
// Check system default
|
||||
let default_info = self.detect_system_default().await;
|
||||
detected.insert(0, default_info);
|
||||
|
||||
detected
|
||||
}
|
||||
|
||||
/// Check if a specific terminal is installed
|
||||
async fn check_terminal_installation(&self, emulator: &TerminalEmulator, config: &TerminalConfig) -> TerminalIntegrationInfo {
|
||||
let installed = config.executable_path.exists();
|
||||
let version = if installed {
|
||||
self.get_terminal_version(emulator, &config.executable_path).await
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
TerminalIntegrationInfo {
|
||||
emulator: *emulator,
|
||||
installed,
|
||||
version,
|
||||
path: if installed { Some(config.executable_path.clone()) } else { None },
|
||||
is_default: false,
|
||||
config: if installed { Some(config.clone()) } else { None },
|
||||
}
|
||||
}
|
||||
|
||||
/// Get terminal version
|
||||
async fn get_terminal_version(&self, emulator: &TerminalEmulator, path: &PathBuf) -> Option<String> {
|
||||
match emulator {
|
||||
TerminalEmulator::WezTerm => {
|
||||
Command::new(path)
|
||||
.arg("--version")
|
||||
.output()
|
||||
.ok()
|
||||
.and_then(|output| String::from_utf8(output.stdout).ok())
|
||||
.map(|v| v.trim().to_string())
|
||||
}
|
||||
TerminalEmulator::Alacritty => {
|
||||
Command::new(path)
|
||||
.arg("--version")
|
||||
.output()
|
||||
.ok()
|
||||
.and_then(|output| String::from_utf8(output.stdout).ok())
|
||||
.map(|v| v.trim().to_string())
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Detect system default terminal
|
||||
async fn detect_system_default(&self) -> TerminalIntegrationInfo {
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
TerminalIntegrationInfo {
|
||||
emulator: TerminalEmulator::Terminal,
|
||||
installed: true,
|
||||
version: None,
|
||||
path: Some(PathBuf::from("/System/Applications/Utilities/Terminal.app")),
|
||||
is_default: true,
|
||||
config: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
TerminalIntegrationInfo {
|
||||
emulator: TerminalEmulator::WindowsTerminal,
|
||||
installed: true,
|
||||
version: None,
|
||||
path: None,
|
||||
is_default: true,
|
||||
config: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
TerminalIntegrationInfo {
|
||||
emulator: TerminalEmulator::Gnome,
|
||||
installed: true,
|
||||
version: None,
|
||||
path: None,
|
||||
is_default: true,
|
||||
config: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Get default terminal
|
||||
pub async fn get_default_terminal(&self) -> TerminalEmulator {
|
||||
*self.default_terminal.read().await
|
||||
}
|
||||
|
||||
/// Set default terminal
|
||||
pub async fn set_default_terminal(&self, emulator: TerminalEmulator) -> Result<(), String> {
|
||||
// Check if terminal is installed
|
||||
let detected = self.detected_terminals.read().await;
|
||||
if emulator != TerminalEmulator::SystemDefault && !detected.contains_key(&emulator) {
|
||||
return Err("Terminal not installed".to_string());
|
||||
}
|
||||
|
||||
*self.default_terminal.write().await = emulator;
|
||||
|
||||
// Notify user
|
||||
if let Some(notification_manager) = &self.notification_manager {
|
||||
let _ = notification_manager.notify_success(
|
||||
"Default Terminal Changed",
|
||||
&format!("Default terminal set to {}", emulator.display_name())
|
||||
).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Launch terminal
|
||||
pub async fn launch_terminal(
|
||||
&self,
|
||||
emulator: Option<TerminalEmulator>,
|
||||
options: TerminalLaunchOptions,
|
||||
) -> Result<(), String> {
|
||||
let emulator = emulator.unwrap_or(*self.default_terminal.read().await);
|
||||
|
||||
match emulator {
|
||||
TerminalEmulator::SystemDefault => self.launch_system_terminal(options).await,
|
||||
_ => self.launch_specific_terminal(emulator, options).await,
|
||||
}
|
||||
}
|
||||
|
||||
/// Launch system terminal
|
||||
async fn launch_system_terminal(&self, options: TerminalLaunchOptions) -> Result<(), String> {
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
self.launch_macos_terminal(options).await
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
self.launch_windows_terminal(options).await
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
self.launch_linux_terminal(options).await
|
||||
}
|
||||
}
|
||||
|
||||
/// Launch specific terminal
|
||||
async fn launch_specific_terminal(
|
||||
&self,
|
||||
emulator: TerminalEmulator,
|
||||
options: TerminalLaunchOptions,
|
||||
) -> Result<(), String> {
|
||||
let configs = self.configs.read().await;
|
||||
let config = configs.get(&emulator)
|
||||
.ok_or_else(|| "Terminal configuration not found".to_string())?;
|
||||
|
||||
let mut command = Command::new(&config.executable_path);
|
||||
|
||||
// Build command arguments
|
||||
for arg_template in &config.args_template {
|
||||
let arg = self.replace_template_variables(arg_template, &options);
|
||||
if !arg.is_empty() {
|
||||
command.arg(arg);
|
||||
}
|
||||
}
|
||||
|
||||
// Set environment variables
|
||||
for (key, value) in &config.env_vars {
|
||||
command.env(key, value);
|
||||
}
|
||||
for (key, value) in &options.env_vars {
|
||||
command.env(key, value);
|
||||
}
|
||||
|
||||
// Set working directory
|
||||
if let Some(cwd) = &options.working_directory {
|
||||
command.current_dir(cwd);
|
||||
}
|
||||
|
||||
// Launch terminal
|
||||
command.spawn()
|
||||
.map_err(|e| format!("Failed to launch terminal: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Launch macOS Terminal.app via AppleScript (`osascript`).
///
/// Fix: paths and commands are interpolated into generated AppleScript, so
/// embedded double quotes and backslashes are escaped first — the previous
/// implementation passed them through raw, which both broke the script and
/// allowed shell/script injection via a crafted working directory.
#[cfg(target_os = "macos")]
async fn launch_macos_terminal(&self, options: TerminalLaunchOptions) -> Result<(), String> {
    use std::process::Command;

    // Escape a string for inclusion in an AppleScript double-quoted literal.
    fn applescript_escape(s: &str) -> String {
        s.replace('\\', "\\\\").replace('"', "\\\"")
    }

    let mut script = String::from("tell application \"Terminal\"\n");
    script.push_str("    activate\n");

    if options.tab {
        script.push_str("    tell application \"System Events\" to keystroke \"t\" using command down\n");
    }

    if let Some(cwd) = options.working_directory {
        let cd = format!("cd '{}'", cwd.display());
        script.push_str(&format!("    do script \"{}\" in front window\n", applescript_escape(&cd)));
    }

    if let Some(command) = options.command {
        let full_command = if options.args.is_empty() {
            command
        } else {
            format!("{} {}", command, options.args.join(" "))
        };
        script.push_str(&format!("    do script \"{}\" in front window\n", applescript_escape(&full_command)));
    }

    script.push_str("end tell\n");

    Command::new("osascript")
        .arg("-e")
        .arg(script)
        .spawn()
        .map_err(|e| format!("Failed to launch Terminal: {}", e))?;

    Ok(())
}
|
||||
|
||||
/// Launch Windows Terminal (`wt.exe`) with the given options.
#[cfg(target_os = "windows")]
async fn launch_windows_terminal(&self, options: TerminalLaunchOptions) -> Result<(), String> {
    use std::process::Command;

    let mut command = Command::new("wt.exe");

    // Starting directory, if one was requested.
    if let Some(cwd) = options.working_directory {
        command.args(&["-d", cwd.to_str().unwrap_or(".")]);
    }

    // Open in a new tab rather than a new window.
    if options.tab {
        command.arg("new-tab");
    }

    // Everything after `--` is the command line run inside the terminal.
    if let Some(cmd) = options.command {
        command.args(&["--", &cmd]);
        command.args(&options.args);
    }

    command
        .spawn()
        .map(|_| ())
        .map_err(|e| format!("Failed to launch Windows Terminal: {}", e))
}
|
||||
|
||||
/// Launch Linux terminal
|
||||
#[cfg(target_os = "linux")]
|
||||
async fn launch_linux_terminal(&self, options: TerminalLaunchOptions) -> Result<(), String> {
|
||||
use std::process::Command;
|
||||
|
||||
// Try common terminal emulators
|
||||
let terminals = ["gnome-terminal", "konsole", "xfce4-terminal", "xterm"];
|
||||
|
||||
for terminal in &terminals {
|
||||
if let Ok(output) = Command::new("which").arg(terminal).output() {
|
||||
if output.status.success() {
|
||||
let mut command = Command::new(terminal);
|
||||
|
||||
if let Some(cwd) = &options.working_directory {
|
||||
match *terminal {
|
||||
"gnome-terminal" => {
|
||||
command.arg("--working-directory").arg(cwd);
|
||||
}
|
||||
"konsole" => {
|
||||
command.arg("--workdir").arg(cwd);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(cmd) = &options.command {
|
||||
match *terminal {
|
||||
"gnome-terminal" => {
|
||||
command.arg("--").arg(cmd);
|
||||
}
|
||||
"konsole" => {
|
||||
command.arg("-e").arg(cmd);
|
||||
}
|
||||
_ => {
|
||||
command.arg("-e").arg(cmd);
|
||||
}
|
||||
}
|
||||
for arg in &options.args {
|
||||
command.arg(arg);
|
||||
}
|
||||
}
|
||||
|
||||
return command.spawn()
|
||||
.map_err(|e| format!("Failed to launch terminal: {}", e))
|
||||
.map(|_| ());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("No suitable terminal emulator found".to_string())
|
||||
}
|
||||
|
||||
/// Create SSH URL
|
||||
pub async fn create_ssh_url(
|
||||
&self,
|
||||
emulator: TerminalEmulator,
|
||||
user: &str,
|
||||
host: &str,
|
||||
port: u16,
|
||||
) -> Option<String> {
|
||||
let schemes = self.url_schemes.read().await;
|
||||
schemes.get(&emulator).map(|scheme| {
|
||||
scheme.template
|
||||
.replace("{user}", user)
|
||||
.replace("{host}", host)
|
||||
.replace("{port}", &port.to_string())
|
||||
})
|
||||
}
|
||||
|
||||
/// Get terminal configuration
|
||||
pub async fn get_terminal_config(&self, emulator: TerminalEmulator) -> Option<TerminalConfig> {
|
||||
self.configs.read().await.get(&emulator).cloned()
|
||||
}
|
||||
|
||||
/// Update terminal configuration
|
||||
pub async fn update_terminal_config(&self, config: TerminalConfig) {
|
||||
self.configs.write().await.insert(config.emulator, config);
|
||||
}
|
||||
|
||||
/// List detected terminals
|
||||
pub async fn list_detected_terminals(&self) -> Vec<TerminalIntegrationInfo> {
|
||||
self.detected_terminals.read().await.values().cloned().collect()
|
||||
}
|
||||
|
||||
// Helper methods
|
||||
fn replace_template_variables(&self, template: &str, options: &TerminalLaunchOptions) -> String {
|
||||
let mut result = template.to_string();
|
||||
|
||||
if let Some(cwd) = &options.working_directory {
|
||||
result = result.replace("{working_directory}", cwd.to_str().unwrap_or(""));
|
||||
}
|
||||
|
||||
if let Some(command) = &options.command {
|
||||
result = result.replace("{command}", command);
|
||||
}
|
||||
|
||||
result = result.replace("{args}", &options.args.join(" "));
|
||||
|
||||
if let Some(title) = &options.title {
|
||||
result = result.replace("{title}", title);
|
||||
}
|
||||
|
||||
// Remove empty placeholders
|
||||
result = result.replace("{working_directory}", "");
|
||||
result = result.replace("{command}", "");
|
||||
result = result.replace("{args}", "");
|
||||
result = result.replace("{title}", "");
|
||||
|
||||
result.trim().to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// Terminal integration statistics
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TerminalIntegrationStats {
|
||||
pub total_terminals: usize,
|
||||
pub installed_terminals: usize,
|
||||
pub default_terminal: TerminalEmulator,
|
||||
pub terminals_by_platform: HashMap<String, Vec<TerminalEmulator>>,
|
||||
}
|
||||
182
tauri/src-tauri/src/terminal_spawn_service.rs
Normal file
182
tauri/src-tauri/src/terminal_spawn_service.rs
Normal file
|
|
@ -0,0 +1,182 @@
|
|||
use tokio::sync::{mpsc, Mutex};
|
||||
use std::sync::Arc;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tauri::Manager;
|
||||
|
||||
/// Request to spawn a terminal
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TerminalSpawnRequest {
|
||||
pub session_id: String,
|
||||
pub terminal_type: Option<String>,
|
||||
pub command: Option<String>,
|
||||
pub working_directory: Option<String>,
|
||||
pub environment: Option<std::collections::HashMap<String, String>>,
|
||||
}
|
||||
|
||||
/// Response from terminal spawn
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TerminalSpawnResponse {
|
||||
pub success: bool,
|
||||
pub error: Option<String>,
|
||||
pub terminal_pid: Option<u32>,
|
||||
}
|
||||
|
||||
/// Terminal Spawn Service - manages background terminal spawning
|
||||
pub struct TerminalSpawnService {
|
||||
request_tx: mpsc::Sender<TerminalSpawnRequest>,
|
||||
terminal_integrations_manager: Arc<crate::terminal_integrations::TerminalIntegrationsManager>,
|
||||
}
|
||||
|
||||
impl TerminalSpawnService {
|
||||
pub fn new(
|
||||
terminal_integrations_manager: Arc<crate::terminal_integrations::TerminalIntegrationsManager>,
|
||||
) -> Self {
|
||||
let (tx, mut rx) = mpsc::channel::<TerminalSpawnRequest>(100);
|
||||
|
||||
let manager_clone = terminal_integrations_manager.clone();
|
||||
|
||||
// Spawn background worker to handle terminal spawn requests
|
||||
tokio::spawn(async move {
|
||||
while let Some(request) = rx.recv().await {
|
||||
let manager = manager_clone.clone();
|
||||
tokio::spawn(async move {
|
||||
let _ = Self::handle_spawn_request(request, manager).await;
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
Self {
|
||||
request_tx: tx,
|
||||
terminal_integrations_manager,
|
||||
}
|
||||
}
|
||||
|
||||
/// Queue a terminal spawn request
|
||||
pub async fn spawn_terminal(&self, request: TerminalSpawnRequest) -> Result<(), String> {
|
||||
self.request_tx.send(request).await
|
||||
.map_err(|e| format!("Failed to queue terminal spawn: {}", e))
|
||||
}
|
||||
|
||||
/// Handle a spawn request
|
||||
async fn handle_spawn_request(
|
||||
request: TerminalSpawnRequest,
|
||||
terminal_integrations_manager: Arc<crate::terminal_integrations::TerminalIntegrationsManager>,
|
||||
) -> Result<TerminalSpawnResponse, String> {
|
||||
// Determine which terminal to use
|
||||
let terminal_type = if let Some(terminal) = &request.terminal_type {
|
||||
// Parse terminal type
|
||||
match terminal.as_str() {
|
||||
"Terminal" => crate::terminal_integrations::TerminalEmulator::Terminal,
|
||||
"iTerm2" => crate::terminal_integrations::TerminalEmulator::ITerm2,
|
||||
"Hyper" => crate::terminal_integrations::TerminalEmulator::Hyper,
|
||||
"Alacritty" => crate::terminal_integrations::TerminalEmulator::Alacritty,
|
||||
"Warp" => crate::terminal_integrations::TerminalEmulator::Warp,
|
||||
"Kitty" => crate::terminal_integrations::TerminalEmulator::Kitty,
|
||||
"WezTerm" => crate::terminal_integrations::TerminalEmulator::WezTerm,
|
||||
"Ghostty" => crate::terminal_integrations::TerminalEmulator::Ghostty,
|
||||
_ => terminal_integrations_manager.get_default_terminal().await,
|
||||
}
|
||||
} else {
|
||||
terminal_integrations_manager.get_default_terminal().await
|
||||
};
|
||||
|
||||
// Build launch options
|
||||
let mut launch_options = crate::terminal_integrations::TerminalLaunchOptions {
|
||||
command: request.command,
|
||||
working_directory: request.working_directory,
|
||||
environment: request.environment,
|
||||
title: Some(format!("VibeTunnel Session {}", request.session_id)),
|
||||
wait_for_exit: Some(false),
|
||||
new_window: Some(true),
|
||||
tab_mode: Some(false),
|
||||
profile: None,
|
||||
};
|
||||
|
||||
// If no command specified, create a VibeTunnel session command
|
||||
if launch_options.command.is_none() {
|
||||
// Get server status to build the correct URL
|
||||
let port = 4020; // Default port, should get from settings
|
||||
launch_options.command = Some(format!("vt connect localhost:{}/{}", port, request.session_id));
|
||||
}
|
||||
|
||||
// Launch the terminal
|
||||
match terminal_integrations_manager.launch_terminal(Some(terminal_type), launch_options).await {
|
||||
Ok(_) => Ok(TerminalSpawnResponse {
|
||||
success: true,
|
||||
error: None,
|
||||
terminal_pid: None, // We don't track PIDs in the current implementation
|
||||
}),
|
||||
Err(e) => Ok(TerminalSpawnResponse {
|
||||
success: false,
|
||||
error: Some(e),
|
||||
terminal_pid: None,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
/// Spawn terminal for a specific session
|
||||
pub async fn spawn_terminal_for_session(
|
||||
&self,
|
||||
session_id: String,
|
||||
terminal_type: Option<String>,
|
||||
) -> Result<(), String> {
|
||||
let request = TerminalSpawnRequest {
|
||||
session_id,
|
||||
terminal_type,
|
||||
command: None,
|
||||
working_directory: None,
|
||||
environment: None,
|
||||
};
|
||||
|
||||
self.spawn_terminal(request).await
|
||||
}
|
||||
|
||||
/// Spawn terminal with custom command
|
||||
pub async fn spawn_terminal_with_command(
|
||||
&self,
|
||||
command: String,
|
||||
working_directory: Option<String>,
|
||||
terminal_type: Option<String>,
|
||||
) -> Result<(), String> {
|
||||
let request = TerminalSpawnRequest {
|
||||
session_id: uuid::Uuid::new_v4().to_string(),
|
||||
terminal_type,
|
||||
command: Some(command),
|
||||
working_directory,
|
||||
environment: None,
|
||||
};
|
||||
|
||||
self.spawn_terminal(request).await
|
||||
}
|
||||
}
|
||||
|
||||
// Commands for Tauri
|
||||
#[tauri::command]
|
||||
pub async fn spawn_terminal_for_session(
|
||||
session_id: String,
|
||||
terminal_type: Option<String>,
|
||||
state: tauri::State<'_, crate::state::AppState>,
|
||||
) -> Result<(), String> {
|
||||
let spawn_service = &state.terminal_spawn_service;
|
||||
spawn_service.spawn_terminal_for_session(session_id, terminal_type).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn spawn_terminal_with_command(
|
||||
command: String,
|
||||
working_directory: Option<String>,
|
||||
terminal_type: Option<String>,
|
||||
state: tauri::State<'_, crate::state::AppState>,
|
||||
) -> Result<(), String> {
|
||||
let spawn_service = &state.terminal_spawn_service;
|
||||
spawn_service.spawn_terminal_with_command(command, working_directory, terminal_type).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn spawn_custom_terminal(
|
||||
request: TerminalSpawnRequest,
|
||||
state: tauri::State<'_, crate::state::AppState>,
|
||||
) -> Result<(), String> {
|
||||
let spawn_service = &state.terminal_spawn_service;
|
||||
spawn_service.spawn_terminal(request).await
|
||||
}
|
||||
377
tauri/src-tauri/src/tty_forward.rs
Normal file
377
tauri/src-tauri/src/tty_forward.rs
Normal file
|
|
@ -0,0 +1,377 @@
|
|||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::{mpsc, RwLock, oneshot};
|
||||
use std::io::{Read, Write};
|
||||
use tokio::net::{TcpListener, TcpStream};
|
||||
use tokio::io::{AsyncReadExt, AsyncWriteExt};
|
||||
use portable_pty::{CommandBuilder, PtySize, native_pty_system};
|
||||
use uuid::Uuid;
|
||||
use tracing::{info, error};
|
||||
use bytes::Bytes;
|
||||
|
||||
/// Represents a forwarded TTY session
|
||||
pub struct ForwardedSession {
|
||||
pub id: String,
|
||||
pub local_port: u16,
|
||||
pub remote_host: String,
|
||||
pub remote_port: u16,
|
||||
pub connected: bool,
|
||||
pub client_count: usize,
|
||||
}
|
||||
|
||||
/// Manages TTY forwarding sessions
|
||||
pub struct TTYForwardManager {
|
||||
sessions: Arc<RwLock<HashMap<String, ForwardedSession>>>,
|
||||
listeners: Arc<RwLock<HashMap<String, oneshot::Sender<()>>>>,
|
||||
}
|
||||
|
||||
impl TTYForwardManager {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
sessions: Arc::new(RwLock::new(HashMap::new())),
|
||||
listeners: Arc::new(RwLock::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
|
||||
/// Start a TTY forwarding session
|
||||
pub async fn start_forward(
|
||||
&self,
|
||||
local_port: u16,
|
||||
remote_host: String,
|
||||
remote_port: u16,
|
||||
shell: Option<String>,
|
||||
) -> Result<String, String> {
|
||||
let id = Uuid::new_v4().to_string();
|
||||
|
||||
// Create TCP listener
|
||||
let listener = TcpListener::bind(format!("127.0.0.1:{}", local_port))
|
||||
.await
|
||||
.map_err(|e| format!("Failed to bind to port {}: {}", local_port, e))?;
|
||||
|
||||
let actual_port = listener.local_addr()
|
||||
.map_err(|e| format!("Failed to get local address: {}", e))?
|
||||
.port();
|
||||
|
||||
// Create session
|
||||
let session = ForwardedSession {
|
||||
id: id.clone(),
|
||||
local_port: actual_port,
|
||||
remote_host: remote_host.clone(),
|
||||
remote_port,
|
||||
connected: false,
|
||||
client_count: 0,
|
||||
};
|
||||
|
||||
// Store session
|
||||
self.sessions.write().await.insert(id.clone(), session);
|
||||
|
||||
// Create shutdown channel
|
||||
let (shutdown_tx, shutdown_rx) = oneshot::channel();
|
||||
self.listeners.write().await.insert(id.clone(), shutdown_tx);
|
||||
|
||||
// Start listening for connections
|
||||
let sessions = self.sessions.clone();
|
||||
let session_id = id.clone();
|
||||
let shell = shell.unwrap_or_else(|| {
|
||||
std::env::var("SHELL").unwrap_or_else(|_| {
|
||||
if cfg!(target_os = "windows") {
|
||||
"cmd.exe".to_string()
|
||||
} else {
|
||||
"/bin/bash".to_string()
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
tokio::spawn(async move {
|
||||
Self::accept_connections(
|
||||
listener,
|
||||
sessions,
|
||||
session_id,
|
||||
remote_host,
|
||||
remote_port,
|
||||
shell,
|
||||
shutdown_rx,
|
||||
).await;
|
||||
});
|
||||
|
||||
info!("Started TTY forward on port {} (ID: {})", actual_port, id);
|
||||
Ok(id)
|
||||
}
|
||||
|
||||
/// Accept incoming connections and forward them
|
||||
async fn accept_connections(
|
||||
listener: TcpListener,
|
||||
sessions: Arc<RwLock<HashMap<String, ForwardedSession>>>,
|
||||
session_id: String,
|
||||
_remote_host: String,
|
||||
_remote_port: u16,
|
||||
shell: String,
|
||||
mut shutdown_rx: oneshot::Receiver<()>,
|
||||
) {
|
||||
loop {
|
||||
tokio::select! {
|
||||
accept_result = listener.accept() => {
|
||||
match accept_result {
|
||||
Ok((stream, addr)) => {
|
||||
info!("New TTY forward connection from {}", addr);
|
||||
|
||||
// Update client count
|
||||
if let Some(session) = sessions.write().await.get_mut(&session_id) {
|
||||
session.client_count += 1;
|
||||
session.connected = true;
|
||||
}
|
||||
|
||||
// Handle the connection
|
||||
let sessions_clone = sessions.clone();
|
||||
let session_id_clone = session_id.clone();
|
||||
let shell_clone = shell.clone();
|
||||
|
||||
tokio::spawn(async move {
|
||||
if let Err(e) = Self::handle_client(
|
||||
stream,
|
||||
sessions_clone.clone(),
|
||||
session_id_clone.clone(),
|
||||
shell_clone,
|
||||
).await {
|
||||
error!("Error handling TTY forward client: {}", e);
|
||||
}
|
||||
|
||||
// Decrease client count
|
||||
if let Some(session) = sessions_clone.write().await.get_mut(&session_id_clone) {
|
||||
session.client_count = session.client_count.saturating_sub(1);
|
||||
if session.client_count == 0 {
|
||||
session.connected = false;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Failed to accept connection: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ = &mut shutdown_rx => {
|
||||
info!("Shutting down TTY forward listener for session {}", session_id);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Handle a single client connection
|
||||
async fn handle_client(
|
||||
stream: TcpStream,
|
||||
_sessions: Arc<RwLock<HashMap<String, ForwardedSession>>>,
|
||||
_session_id: String,
|
||||
shell: String,
|
||||
) -> Result<(), String> {
|
||||
// Set up PTY
|
||||
let pty_system = native_pty_system();
|
||||
let pty_pair = pty_system
|
||||
.openpty(PtySize {
|
||||
rows: 24,
|
||||
cols: 80,
|
||||
pixel_width: 0,
|
||||
pixel_height: 0,
|
||||
})
|
||||
.map_err(|e| format!("Failed to open PTY: {}", e))?;
|
||||
|
||||
// Spawn shell
|
||||
let cmd = CommandBuilder::new(&shell);
|
||||
let child = pty_pair
|
||||
.slave
|
||||
.spawn_command(cmd)
|
||||
.map_err(|e| format!("Failed to spawn shell: {}", e))?;
|
||||
|
||||
// Get reader and writer
|
||||
let mut reader = pty_pair
|
||||
.master
|
||||
.try_clone_reader()
|
||||
.map_err(|e| format!("Failed to clone reader: {}", e))?;
|
||||
|
||||
let mut writer = pty_pair
|
||||
.master
|
||||
.take_writer()
|
||||
.map_err(|e| format!("Failed to take writer: {}", e))?;
|
||||
|
||||
// Create channels for bidirectional communication
|
||||
let (tx_to_pty, mut rx_from_tcp) = mpsc::unbounded_channel::<Bytes>();
|
||||
let (tx_to_tcp, mut rx_from_pty) = mpsc::unbounded_channel::<Bytes>();
|
||||
|
||||
// Split the TCP stream
|
||||
let (mut tcp_reader, mut tcp_writer) = stream.into_split();
|
||||
|
||||
// Task 1: Read from TCP and write to PTY
|
||||
let tcp_to_pty = tokio::spawn(async move {
|
||||
let mut tcp_buf = [0u8; 4096];
|
||||
loop {
|
||||
match tcp_reader.read(&mut tcp_buf).await {
|
||||
Ok(0) => break, // Connection closed
|
||||
Ok(n) => {
|
||||
let data = Bytes::copy_from_slice(&tcp_buf[..n]);
|
||||
if tx_to_pty.send(data).is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Error reading from TCP: {}", e);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Task 2: Read from PTY and write to TCP
|
||||
let pty_to_tcp = tokio::spawn(async move {
|
||||
while let Some(data) = rx_from_pty.recv().await {
|
||||
if tcp_writer.write_all(&data).await.is_err() {
|
||||
break;
|
||||
}
|
||||
if tcp_writer.flush().await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Task 3: PTY reader thread
|
||||
let reader_handle = std::thread::spawn(move || {
|
||||
let mut buffer = [0u8; 4096];
|
||||
loop {
|
||||
match reader.read(&mut buffer) {
|
||||
Ok(0) => break,
|
||||
Ok(n) => {
|
||||
let data = Bytes::copy_from_slice(&buffer[..n]);
|
||||
// Since we're in a thread, we can't use blocking_send on unbounded channel
|
||||
// We'll use a different approach
|
||||
if tx_to_tcp.send(data).is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Error reading from PTY: {}", e);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Task 4: PTY writer thread
|
||||
let writer_handle = std::thread::spawn(move || {
|
||||
// Create a blocking runtime for the thread
|
||||
let rt = tokio::runtime::Builder::new_current_thread()
|
||||
.enable_all()
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
rt.block_on(async {
|
||||
while let Some(data) = rx_from_tcp.recv().await {
|
||||
if writer.write_all(&data).is_err() {
|
||||
break;
|
||||
}
|
||||
if writer.flush().is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Wait for any task to complete
|
||||
tokio::select! {
|
||||
_ = tcp_to_pty => {},
|
||||
_ = pty_to_tcp => {},
|
||||
}
|
||||
|
||||
// Clean up
|
||||
drop(child);
|
||||
let _ = reader_handle.join();
|
||||
let _ = writer_handle.join();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Stop a TTY forwarding session
|
||||
pub async fn stop_forward(&self, id: &str) -> Result<(), String> {
|
||||
// Remove session
|
||||
self.sessions.write().await.remove(id);
|
||||
|
||||
// Send shutdown signal
|
||||
if let Some(shutdown_tx) = self.listeners.write().await.remove(id) {
|
||||
let _ = shutdown_tx.send(());
|
||||
}
|
||||
|
||||
info!("Stopped TTY forward session: {}", id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// List all active forwarding sessions
|
||||
pub async fn list_forwards(&self) -> Vec<ForwardedSession> {
|
||||
self.sessions.read().await
|
||||
.values()
|
||||
.map(|s| ForwardedSession {
|
||||
id: s.id.clone(),
|
||||
local_port: s.local_port,
|
||||
remote_host: s.remote_host.clone(),
|
||||
remote_port: s.remote_port,
|
||||
connected: s.connected,
|
||||
client_count: s.client_count,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get a specific forwarding session
|
||||
pub async fn get_forward(&self, id: &str) -> Option<ForwardedSession> {
|
||||
self.sessions.read().await.get(id).map(|s| ForwardedSession {
|
||||
id: s.id.clone(),
|
||||
local_port: s.local_port,
|
||||
remote_host: s.remote_host.clone(),
|
||||
remote_port: s.remote_port,
|
||||
connected: s.connected,
|
||||
client_count: s.client_count,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// HTTP endpoint handler for terminal spawn requests
|
||||
pub async fn handle_terminal_spawn(
|
||||
port: u16,
|
||||
_shell: Option<String>,
|
||||
) -> Result<(), String> {
|
||||
// Listen for HTTP requests on the specified port
|
||||
let listener = TcpListener::bind(format!("127.0.0.1:{}", port))
|
||||
.await
|
||||
.map_err(|e| format!("Failed to bind spawn listener: {}", e))?;
|
||||
|
||||
info!("Terminal spawn service listening on port {}", port);
|
||||
|
||||
loop {
|
||||
let (stream, addr) = listener.accept()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to accept spawn connection: {}", e))?;
|
||||
|
||||
info!("Terminal spawn request from {}", addr);
|
||||
|
||||
// Handle the spawn request
|
||||
tokio::spawn(async move {
|
||||
if let Err(e) = handle_spawn_request(stream, None).await {
|
||||
error!("Error handling spawn request: {}", e);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Handle a single terminal spawn request
|
||||
async fn handle_spawn_request(
|
||||
mut stream: TcpStream,
|
||||
_shell: Option<String>,
|
||||
) -> Result<(), String> {
|
||||
// Simple HTTP response
|
||||
let response = b"HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n\r\nTerminal spawned\r\n";
|
||||
stream.write_all(response)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to write response: {}", e))?;
|
||||
|
||||
// TODO: Implement actual terminal spawning logic
|
||||
// This would integrate with the system's terminal emulator
|
||||
|
||||
Ok(())
|
||||
}
|
||||
523
tauri/src-tauri/src/updater.rs
Normal file
523
tauri/src-tauri/src/updater.rs
Normal file
|
|
@ -0,0 +1,523 @@
|
|||
use serde::{Serialize, Deserialize};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
use chrono::{DateTime, Utc, TimeZone};
|
||||
use tauri::{AppHandle, Emitter};
|
||||
use tauri_plugin_updater::UpdaterExt;
|
||||
|
||||
/// Update channel type
|
||||
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
|
||||
pub enum UpdateChannel {
|
||||
Stable,
|
||||
Beta,
|
||||
Nightly,
|
||||
Custom,
|
||||
}
|
||||
|
||||
impl UpdateChannel {
|
||||
pub fn as_str(&self) -> &str {
|
||||
match self {
|
||||
UpdateChannel::Stable => "stable",
|
||||
UpdateChannel::Beta => "beta",
|
||||
UpdateChannel::Nightly => "nightly",
|
||||
UpdateChannel::Custom => "custom",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_str(s: &str) -> Self {
|
||||
match s.to_lowercase().as_str() {
|
||||
"stable" => UpdateChannel::Stable,
|
||||
"beta" => UpdateChannel::Beta,
|
||||
"nightly" => UpdateChannel::Nightly,
|
||||
_ => UpdateChannel::Custom,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Update status
|
||||
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
|
||||
pub enum UpdateStatus {
|
||||
Idle,
|
||||
Checking,
|
||||
Available,
|
||||
Downloading,
|
||||
Ready,
|
||||
Installing,
|
||||
Error,
|
||||
NoUpdate,
|
||||
}
|
||||
|
||||
/// Update information
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct UpdateInfo {
|
||||
pub version: String,
|
||||
pub notes: String,
|
||||
pub pub_date: Option<DateTime<Utc>>,
|
||||
pub download_size: Option<u64>,
|
||||
pub signature: Option<String>,
|
||||
pub download_url: String,
|
||||
pub channel: UpdateChannel,
|
||||
}
|
||||
|
||||
/// Update progress
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct UpdateProgress {
|
||||
pub downloaded: u64,
|
||||
pub total: u64,
|
||||
pub percentage: f32,
|
||||
pub bytes_per_second: Option<u64>,
|
||||
pub eta_seconds: Option<u64>,
|
||||
}
|
||||
|
||||
/// Update settings
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct UpdaterSettings {
|
||||
pub channel: UpdateChannel,
|
||||
pub check_on_startup: bool,
|
||||
pub check_interval_hours: u32,
|
||||
pub auto_download: bool,
|
||||
pub auto_install: bool,
|
||||
pub show_release_notes: bool,
|
||||
pub include_pre_releases: bool,
|
||||
pub custom_endpoint: Option<String>,
|
||||
pub proxy: Option<String>,
|
||||
}
|
||||
|
||||
impl Default for UpdaterSettings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
channel: UpdateChannel::Stable,
|
||||
check_on_startup: true,
|
||||
check_interval_hours: 24,
|
||||
auto_download: false,
|
||||
auto_install: false,
|
||||
show_release_notes: true,
|
||||
include_pre_releases: false,
|
||||
custom_endpoint: None,
|
||||
proxy: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Update manager state
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct UpdateState {
|
||||
pub status: UpdateStatus,
|
||||
pub current_version: String,
|
||||
pub available_update: Option<UpdateInfo>,
|
||||
pub progress: Option<UpdateProgress>,
|
||||
pub last_check: Option<DateTime<Utc>>,
|
||||
pub last_error: Option<String>,
|
||||
pub update_history: Vec<UpdateHistoryEntry>,
|
||||
}
|
||||
|
||||
/// Update history entry
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct UpdateHistoryEntry {
|
||||
pub version: String,
|
||||
pub from_version: String,
|
||||
pub channel: UpdateChannel,
|
||||
pub installed_at: DateTime<Utc>,
|
||||
pub success: bool,
|
||||
pub notes: Option<String>,
|
||||
}
|
||||
|
||||
/// Update manager
|
||||
pub struct UpdateManager {
|
||||
app_handle: Arc<RwLock<Option<AppHandle>>>,
|
||||
settings: Arc<RwLock<UpdaterSettings>>,
|
||||
state: Arc<RwLock<UpdateState>>,
|
||||
notification_manager: Option<Arc<crate::notification_manager::NotificationManager>>,
|
||||
}
|
||||
|
||||
impl UpdateManager {
|
||||
/// Create a new update manager
|
||||
pub fn new(current_version: String) -> Self {
|
||||
Self {
|
||||
app_handle: Arc::new(RwLock::new(None)),
|
||||
settings: Arc::new(RwLock::new(UpdaterSettings::default())),
|
||||
state: Arc::new(RwLock::new(UpdateState {
|
||||
status: UpdateStatus::Idle,
|
||||
current_version,
|
||||
available_update: None,
|
||||
progress: None,
|
||||
last_check: None,
|
||||
last_error: None,
|
||||
update_history: Vec::new(),
|
||||
})),
|
||||
notification_manager: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Set the app handle
|
||||
pub async fn set_app_handle(&self, app_handle: AppHandle) {
|
||||
*self.app_handle.write().await = Some(app_handle);
|
||||
}
|
||||
|
||||
/// Set the notification manager
|
||||
pub fn set_notification_manager(&mut self, notification_manager: Arc<crate::notification_manager::NotificationManager>) {
|
||||
self.notification_manager = Some(notification_manager);
|
||||
}
|
||||
|
||||
/// Load settings from configuration
|
||||
pub async fn load_settings(&self) -> Result<(), String> {
|
||||
if let Ok(settings) = crate::settings::Settings::load() {
|
||||
if let Some(update_settings) = settings.updates {
|
||||
let mut updater_settings = self.settings.write().await;
|
||||
updater_settings.channel = UpdateChannel::from_str(&update_settings.channel);
|
||||
updater_settings.check_on_startup = true;
|
||||
updater_settings.check_interval_hours = match update_settings.check_frequency.as_str() {
|
||||
"daily" => 24,
|
||||
"weekly" => 168,
|
||||
"monthly" => 720,
|
||||
_ => 24,
|
||||
};
|
||||
updater_settings.auto_download = update_settings.auto_download;
|
||||
updater_settings.auto_install = update_settings.auto_install;
|
||||
updater_settings.show_release_notes = update_settings.show_release_notes;
|
||||
updater_settings.include_pre_releases = update_settings.include_pre_releases;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get update settings
|
||||
pub async fn get_settings(&self) -> UpdaterSettings {
|
||||
self.settings.read().await.clone()
|
||||
}
|
||||
|
||||
/// Update settings
|
||||
pub async fn update_settings(&self, settings: UpdaterSettings) -> Result<(), String> {
|
||||
*self.settings.write().await = settings.clone();
|
||||
|
||||
// Save to persistent settings
|
||||
if let Ok(mut app_settings) = crate::settings::Settings::load() {
|
||||
app_settings.updates = Some(crate::settings::UpdateSettings {
|
||||
channel: settings.channel.as_str().to_string(),
|
||||
check_frequency: match settings.check_interval_hours {
|
||||
1..=23 => "daily".to_string(),
|
||||
24..=167 => "daily".to_string(),
|
||||
168..=719 => "weekly".to_string(),
|
||||
_ => "monthly".to_string(),
|
||||
},
|
||||
auto_download: settings.auto_download,
|
||||
auto_install: settings.auto_install,
|
||||
show_release_notes: settings.show_release_notes,
|
||||
include_pre_releases: settings.include_pre_releases,
|
||||
});
|
||||
app_settings.save()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get current update state
|
||||
pub async fn get_state(&self) -> UpdateState {
|
||||
self.state.read().await.clone()
|
||||
}
|
||||
|
||||
/// Check for updates
|
||||
pub async fn check_for_updates(&self) -> Result<Option<UpdateInfo>, String> {
|
||||
// Update status
|
||||
{
|
||||
let mut state = self.state.write().await;
|
||||
state.status = UpdateStatus::Checking;
|
||||
state.last_error = None;
|
||||
}
|
||||
|
||||
// Emit checking event
|
||||
self.emit_update_event("checking", None).await;
|
||||
|
||||
let app_handle_guard = self.app_handle.read().await;
|
||||
let app_handle = app_handle_guard.as_ref()
|
||||
.ok_or_else(|| "App handle not set".to_string())?;
|
||||
|
||||
// Get the updater instance
|
||||
let updater = app_handle.updater_builder();
|
||||
|
||||
// Configure updater based on settings
|
||||
let settings = self.settings.read().await;
|
||||
|
||||
// Build updater with channel-specific endpoint
|
||||
let updater_result = match settings.channel {
|
||||
UpdateChannel::Stable => updater.endpoints(vec![
|
||||
"https://releases.vibetunnel.com/stable/{{target}}/{{arch}}/{{current_version}}".parse().unwrap()
|
||||
]),
|
||||
UpdateChannel::Beta => updater.endpoints(vec![
|
||||
"https://releases.vibetunnel.com/beta/{{target}}/{{arch}}/{{current_version}}".parse().unwrap()
|
||||
]),
|
||||
UpdateChannel::Nightly => updater.endpoints(vec![
|
||||
"https://releases.vibetunnel.com/nightly/{{target}}/{{arch}}/{{current_version}}".parse().unwrap()
|
||||
]),
|
||||
UpdateChannel::Custom => {
|
||||
if let Some(endpoint) = &settings.custom_endpoint {
|
||||
match endpoint.parse() {
|
||||
Ok(url) => updater.endpoints(vec![url]),
|
||||
Err(_) => return Err("Invalid custom endpoint URL".to_string()),
|
||||
}
|
||||
} else {
|
||||
return Err("Custom endpoint not configured".to_string());
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Build and check
|
||||
match updater_result {
|
||||
Ok(updater_builder) => match updater_builder.build() {
|
||||
Ok(updater) => {
|
||||
match updater.check().await {
|
||||
Ok(Some(update)) => {
|
||||
let update_info = UpdateInfo {
|
||||
version: update.version.clone(),
|
||||
notes: update.body.clone().unwrap_or_default(),
|
||||
pub_date: update.date.map(|d| Utc.timestamp_opt(d.unix_timestamp(), 0).single().unwrap_or(Utc::now())),
|
||||
download_size: None, // TODO: Get from update
|
||||
signature: None,
|
||||
download_url: String::new(), // Will be set by updater
|
||||
channel: settings.channel,
|
||||
};
|
||||
|
||||
// Update state
|
||||
{
|
||||
let mut state = self.state.write().await;
|
||||
state.status = UpdateStatus::Available;
|
||||
state.available_update = Some(update_info.clone());
|
||||
state.last_check = Some(Utc::now());
|
||||
}
|
||||
|
||||
// Emit available event
|
||||
self.emit_update_event("available", Some(&update_info)).await;
|
||||
|
||||
// Show notification
|
||||
if let Some(notification_manager) = &self.notification_manager {
|
||||
let _ = notification_manager.notify_update_available(
|
||||
&update_info.version,
|
||||
&update_info.download_url
|
||||
).await;
|
||||
}
|
||||
|
||||
// Auto-download if enabled
|
||||
if settings.auto_download {
|
||||
let _ = self.download_update().await;
|
||||
}
|
||||
|
||||
Ok(Some(update_info))
|
||||
}
|
||||
Ok(None) => {
|
||||
// No update available
|
||||
let mut state = self.state.write().await;
|
||||
state.status = UpdateStatus::NoUpdate;
|
||||
state.last_check = Some(Utc::now());
|
||||
|
||||
self.emit_update_event("no-update", None).await;
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
Err(e) => {
|
||||
let error_msg = format!("Failed to check for updates: {}", e);
|
||||
|
||||
let mut state = self.state.write().await;
|
||||
state.status = UpdateStatus::Error;
|
||||
state.last_error = Some(error_msg.clone());
|
||||
state.last_check = Some(Utc::now());
|
||||
|
||||
self.emit_update_event("error", None).await;
|
||||
|
||||
Err(error_msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
let error_msg = format!("Failed to build updater: {}", e);
|
||||
|
||||
let mut state = self.state.write().await;
|
||||
state.status = UpdateStatus::Error;
|
||||
state.last_error = Some(error_msg.clone());
|
||||
|
||||
Err(error_msg)
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
let error_msg = format!("Failed to configure updater endpoints: {}", e);
|
||||
|
||||
let mut state = self.state.write().await;
|
||||
state.status = UpdateStatus::Error;
|
||||
state.last_error = Some(error_msg.clone());
|
||||
|
||||
Err(error_msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Download update
|
||||
pub async fn download_update(&self) -> Result<(), String> {
|
||||
let update_available = {
|
||||
let state = self.state.read().await;
|
||||
state.available_update.is_some()
|
||||
};
|
||||
|
||||
if !update_available {
|
||||
return Err("No update available to download".to_string());
|
||||
}
|
||||
|
||||
// Update status
|
||||
{
|
||||
let mut state = self.state.write().await;
|
||||
state.status = UpdateStatus::Downloading;
|
||||
state.progress = Some(UpdateProgress {
|
||||
downloaded: 0,
|
||||
total: 0,
|
||||
percentage: 0.0,
|
||||
bytes_per_second: None,
|
||||
eta_seconds: None,
|
||||
});
|
||||
}
|
||||
|
||||
self.emit_update_event("downloading", None).await;
|
||||
|
||||
// TODO: Implement actual download with progress tracking
|
||||
// For now, simulate download completion
|
||||
tokio::time::sleep(tokio::time::Duration::from_secs(2)).await;
|
||||
|
||||
// Update status to ready
|
||||
{
|
||||
let mut state = self.state.write().await;
|
||||
state.status = UpdateStatus::Ready;
|
||||
state.progress = None;
|
||||
}
|
||||
|
||||
self.emit_update_event("ready", None).await;
|
||||
|
||||
// Auto-install if enabled
|
||||
let settings = self.settings.read().await;
|
||||
if settings.auto_install {
|
||||
let _ = self.install_update().await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Install update
|
||||
pub async fn install_update(&self) -> Result<(), String> {
|
||||
let update_info = {
|
||||
let state = self.state.read().await;
|
||||
if state.status != UpdateStatus::Ready {
|
||||
return Err("Update not ready for installation".to_string());
|
||||
}
|
||||
state.available_update.clone()
|
||||
};
|
||||
|
||||
let update_info = update_info.ok_or_else(|| "No update available".to_string())?;
|
||||
|
||||
// Update status
|
||||
{
|
||||
let mut state = self.state.write().await;
|
||||
state.status = UpdateStatus::Installing;
|
||||
}
|
||||
|
||||
self.emit_update_event("installing", None).await;
|
||||
|
||||
// Add to history
|
||||
{
|
||||
let mut state = self.state.write().await;
|
||||
let from_version = state.current_version.clone();
|
||||
state.update_history.push(UpdateHistoryEntry {
|
||||
version: update_info.version.clone(),
|
||||
from_version,
|
||||
channel: update_info.channel,
|
||||
installed_at: Utc::now(),
|
||||
success: true,
|
||||
notes: Some(update_info.notes.clone()),
|
||||
});
|
||||
}
|
||||
|
||||
// TODO: Implement actual installation
|
||||
// For now, return success
|
||||
|
||||
self.emit_update_event("installed", None).await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Cancel update
|
||||
pub async fn cancel_update(&self) -> Result<(), String> {
|
||||
let mut state = self.state.write().await;
|
||||
|
||||
match state.status {
|
||||
UpdateStatus::Downloading => {
|
||||
// TODO: Cancel download
|
||||
state.status = UpdateStatus::Available;
|
||||
state.progress = None;
|
||||
Ok(())
|
||||
}
|
||||
_ => Err("No update in progress to cancel".to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Switch update channel
|
||||
pub async fn switch_channel(&self, channel: UpdateChannel) -> Result<(), String> {
|
||||
let mut settings = self.settings.write().await;
|
||||
settings.channel = channel;
|
||||
drop(settings);
|
||||
|
||||
// Save settings
|
||||
self.update_settings(self.get_settings().await).await?;
|
||||
|
||||
// Clear current update info when switching channels
|
||||
let mut state = self.state.write().await;
|
||||
state.available_update = None;
|
||||
state.status = UpdateStatus::Idle;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get update history
|
||||
pub async fn get_update_history(&self, limit: Option<usize>) -> Vec<UpdateHistoryEntry> {
|
||||
let state = self.state.read().await;
|
||||
match limit {
|
||||
Some(l) => state.update_history.iter().rev().take(l).cloned().collect(),
|
||||
None => state.update_history.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Start automatic update checking
|
||||
pub async fn start_auto_check(self: Arc<Self>) {
|
||||
let settings = self.settings.read().await;
|
||||
if !settings.check_on_startup {
|
||||
return;
|
||||
}
|
||||
|
||||
let check_interval = std::time::Duration::from_secs(settings.check_interval_hours as u64 * 3600);
|
||||
drop(settings);
|
||||
|
||||
tokio::spawn(async move {
|
||||
loop {
|
||||
let _ = self.check_for_updates().await;
|
||||
tokio::time::sleep(check_interval).await;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/// Emit update event
|
||||
async fn emit_update_event(&self, event_type: &str, update_info: Option<&UpdateInfo>) {
|
||||
if let Some(app_handle) = self.app_handle.read().await.as_ref() {
|
||||
let event_data = serde_json::json!({
|
||||
"type": event_type,
|
||||
"update": update_info,
|
||||
"state": self.get_state().await,
|
||||
});
|
||||
|
||||
let _ = app_handle.emit("updater:event", event_data);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Update check result
///
/// Snapshot produced by a single update check, serializable for the
/// frontend.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateCheckResult {
    /// Whether a newer version than `current_version` is available.
    pub available: bool,
    /// Version string of the currently running build.
    pub current_version: String,
    /// Latest version found on the channel, if any was reported.
    pub latest_version: Option<String>,
    /// Channel that was queried for this check.
    pub channel: UpdateChannel,
    /// Timestamp (UTC) at which the check was performed.
    pub checked_at: DateTime<Utc>,
}
|
||||
459
tauri/src-tauri/src/welcome.rs
Normal file
459
tauri/src-tauri/src/welcome.rs
Normal file
|
|
@ -0,0 +1,459 @@
|
|||
use serde::{Serialize, Deserialize};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
use std::collections::HashMap;
|
||||
use chrono::{DateTime, Utc};
|
||||
use tauri::{AppHandle, Manager, Emitter};
|
||||
|
||||
/// Tutorial step structure
///
/// A single onboarding step shown within a tutorial category.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TutorialStep {
    /// Unique identifier; also what gets recorded as a completed step.
    pub id: String,
    /// Short title shown in the tutorial UI.
    pub title: String,
    /// One-line summary of the step.
    pub description: String,
    /// Full body text (may span multiple lines).
    pub content: String,
    /// Optional action the UI can trigger for this step.
    pub action: Option<TutorialAction>,
    /// Whether the user has completed this step.
    pub completed: bool,
    /// Display order within the category (1-based in the defaults).
    pub order: u32,
}
|
||||
|
||||
/// Tutorial action that can be triggered
///
/// Describes a frontend-interpretable action (e.g. "create_terminal",
/// "start_server") with an arbitrary JSON payload.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TutorialAction {
    /// Action discriminator interpreted by the frontend.
    pub action_type: String,
    /// Free-form parameters for the action.
    pub payload: HashMap<String, serde_json::Value>,
}
|
||||
|
||||
/// Welcome state tracking
///
/// Persisted/derived onboarding progress for the current user.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WelcomeState {
    /// True on the app's first launch (derived from settings on load).
    pub first_launch: bool,
    /// True once every tutorial step has been completed.
    pub tutorial_completed: bool,
    /// True if the user explicitly skipped the tutorial.
    pub tutorial_skipped: bool,
    /// Ids of tutorial steps the user has completed.
    pub completed_steps: Vec<String>,
    /// App version the user last saw the welcome flow in, if recorded.
    pub last_seen_version: Option<String>,
    /// When onboarding began (set on first launch).
    pub onboarding_date: Option<DateTime<Utc>>,
}
|
||||
|
||||
impl Default for WelcomeState {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
first_launch: true,
|
||||
tutorial_completed: false,
|
||||
tutorial_skipped: false,
|
||||
completed_steps: Vec::new(),
|
||||
last_seen_version: None,
|
||||
onboarding_date: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Tutorial category
///
/// A named group of tutorial steps with display metadata.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TutorialCategory {
    /// Unique category identifier (e.g. "getting_started").
    pub id: String,
    /// Display name of the category.
    pub name: String,
    /// One-line summary of what the category covers.
    pub description: String,
    /// Emoji/icon string shown next to the name.
    pub icon: String,
    /// Ordered steps belonging to this category.
    pub steps: Vec<TutorialStep>,
}
|
||||
|
||||
/// Welcome manager
///
/// Owns onboarding state and tutorial content; all fields are behind
/// `Arc<RwLock<…>>` so the manager can be shared across async tasks.
pub struct WelcomeManager {
    /// Current onboarding/tutorial progress.
    state: Arc<RwLock<WelcomeState>>,
    /// Tutorial content served to the frontend.
    tutorials: Arc<RwLock<Vec<TutorialCategory>>>,
    /// Tauri app handle; `None` until `set_app_handle` is called.
    app_handle: Arc<RwLock<Option<AppHandle>>>,
}
|
||||
|
||||
impl WelcomeManager {
|
||||
/// Create a new welcome manager
|
||||
pub fn new() -> Self {
|
||||
let manager = Self {
|
||||
state: Arc::new(RwLock::new(WelcomeState::default())),
|
||||
tutorials: Arc::new(RwLock::new(Vec::new())),
|
||||
app_handle: Arc::new(RwLock::new(None)),
|
||||
};
|
||||
|
||||
// Initialize default tutorials
|
||||
tokio::spawn({
|
||||
let tutorials = manager.tutorials.clone();
|
||||
async move {
|
||||
let default_tutorials = Self::create_default_tutorials();
|
||||
*tutorials.write().await = default_tutorials;
|
||||
}
|
||||
});
|
||||
|
||||
manager
|
||||
}
|
||||
|
||||
/// Set the app handle
|
||||
pub async fn set_app_handle(&self, app_handle: AppHandle) {
|
||||
*self.app_handle.write().await = Some(app_handle);
|
||||
}
|
||||
|
||||
/// Load welcome state from storage
|
||||
pub async fn load_state(&self) -> Result<(), String> {
|
||||
// Try to load from settings or local storage
|
||||
if let Ok(settings) = crate::settings::Settings::load() {
|
||||
// Check if this is first launch based on settings
|
||||
let mut state = self.state.write().await;
|
||||
state.first_launch = settings.general.show_welcome_on_startup.unwrap_or(true);
|
||||
|
||||
// Mark first launch as false for next time
|
||||
if state.first_launch {
|
||||
state.onboarding_date = Some(Utc::now());
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Save welcome state
|
||||
pub async fn save_state(&self) -> Result<(), String> {
|
||||
let state = self.state.read().await;
|
||||
|
||||
// Update settings to reflect welcome state
|
||||
if let Ok(mut settings) = crate::settings::Settings::load() {
|
||||
settings.general.show_welcome_on_startup = Some(!state.tutorial_completed && !state.tutorial_skipped);
|
||||
settings.save().map_err(|e| e.to_string())?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Check if should show welcome screen
|
||||
pub async fn should_show_welcome(&self) -> bool {
|
||||
let state = self.state.read().await;
|
||||
state.first_launch && !state.tutorial_completed && !state.tutorial_skipped
|
||||
}
|
||||
|
||||
/// Get current welcome state
|
||||
pub async fn get_state(&self) -> WelcomeState {
|
||||
self.state.read().await.clone()
|
||||
}
|
||||
|
||||
/// Get all tutorial categories
|
||||
pub async fn get_tutorials(&self) -> Vec<TutorialCategory> {
|
||||
self.tutorials.read().await.clone()
|
||||
}
|
||||
|
||||
/// Get specific tutorial category
|
||||
pub async fn get_tutorial_category(&self, category_id: &str) -> Option<TutorialCategory> {
|
||||
self.tutorials.read().await
|
||||
.iter()
|
||||
.find(|c| c.id == category_id)
|
||||
.cloned()
|
||||
}
|
||||
|
||||
/// Complete a tutorial step
|
||||
pub async fn complete_step(&self, step_id: &str) -> Result<(), String> {
|
||||
let mut state = self.state.write().await;
|
||||
|
||||
if !state.completed_steps.contains(&step_id.to_string()) {
|
||||
state.completed_steps.push(step_id.to_string());
|
||||
|
||||
// Check if all steps are completed
|
||||
let tutorials = self.tutorials.read().await;
|
||||
let total_steps: usize = tutorials.iter()
|
||||
.map(|c| c.steps.len())
|
||||
.sum();
|
||||
|
||||
if state.completed_steps.len() >= total_steps {
|
||||
state.tutorial_completed = true;
|
||||
}
|
||||
|
||||
// Save state
|
||||
drop(state);
|
||||
drop(tutorials);
|
||||
self.save_state().await?;
|
||||
|
||||
// Emit progress event
|
||||
if let Some(app_handle) = self.app_handle.read().await.as_ref() {
|
||||
let _ = app_handle.emit("tutorial:step_completed", step_id);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Skip tutorial
|
||||
pub async fn skip_tutorial(&self) -> Result<(), String> {
|
||||
let mut state = self.state.write().await;
|
||||
state.tutorial_skipped = true;
|
||||
state.first_launch = false;
|
||||
drop(state);
|
||||
|
||||
self.save_state().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Reset tutorial progress
|
||||
pub async fn reset_tutorial(&self) -> Result<(), String> {
|
||||
let mut state = self.state.write().await;
|
||||
state.completed_steps.clear();
|
||||
state.tutorial_completed = false;
|
||||
state.tutorial_skipped = false;
|
||||
drop(state);
|
||||
|
||||
self.save_state().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Show welcome window
|
||||
pub async fn show_welcome_window(&self) -> Result<(), String> {
|
||||
if let Some(app_handle) = self.app_handle.read().await.as_ref() {
|
||||
// Check if welcome window already exists
|
||||
if let Some(window) = app_handle.get_webview_window("welcome") {
|
||||
window.show().map_err(|e| e.to_string())?;
|
||||
window.set_focus().map_err(|e| e.to_string())?;
|
||||
} else {
|
||||
// Create new welcome window
|
||||
tauri::WebviewWindowBuilder::new(
|
||||
app_handle,
|
||||
"welcome",
|
||||
tauri::WebviewUrl::App("welcome.html".into())
|
||||
)
|
||||
.title("Welcome to VibeTunnel")
|
||||
.inner_size(800.0, 600.0)
|
||||
.center()
|
||||
.resizable(false)
|
||||
.build()
|
||||
.map_err(|e| e.to_string())?;
|
||||
}
|
||||
} else {
|
||||
return Err("App handle not set".to_string());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create default tutorial content
|
||||
fn create_default_tutorials() -> Vec<TutorialCategory> {
|
||||
vec![
|
||||
TutorialCategory {
|
||||
id: "getting_started".to_string(),
|
||||
name: "Getting Started".to_string(),
|
||||
description: "Learn the basics of VibeTunnel".to_string(),
|
||||
icon: "🚀".to_string(),
|
||||
steps: vec![
|
||||
TutorialStep {
|
||||
id: "welcome".to_string(),
|
||||
title: "Welcome to VibeTunnel".to_string(),
|
||||
description: "Your powerful terminal session manager".to_string(),
|
||||
content: r#"VibeTunnel lets you create, manage, and share terminal sessions with ease.
|
||||
|
||||
Key features:
|
||||
• Create multiple terminal sessions
|
||||
• Share sessions via web interface
|
||||
• Record terminal sessions
|
||||
• Secure remote access with ngrok
|
||||
• Cross-platform support"#.to_string(),
|
||||
action: None,
|
||||
completed: false,
|
||||
order: 1,
|
||||
},
|
||||
TutorialStep {
|
||||
id: "create_session".to_string(),
|
||||
title: "Creating Your First Session".to_string(),
|
||||
description: "Learn how to create a terminal session".to_string(),
|
||||
content: r#"To create a new terminal session:
|
||||
|
||||
1. Click the "New Terminal" button
|
||||
2. Choose your preferred shell
|
||||
3. Set the session name (optional)
|
||||
4. Click "Create"
|
||||
|
||||
Your session will appear in the sidebar."#.to_string(),
|
||||
action: Some(TutorialAction {
|
||||
action_type: "create_terminal".to_string(),
|
||||
payload: HashMap::new(),
|
||||
}),
|
||||
completed: false,
|
||||
order: 2,
|
||||
},
|
||||
TutorialStep {
|
||||
id: "start_server".to_string(),
|
||||
title: "Starting the Web Server".to_string(),
|
||||
description: "Share your sessions via web interface".to_string(),
|
||||
content: r#"The web server lets you access your terminals from any browser:
|
||||
|
||||
1. Click "Start Server" in the toolbar
|
||||
2. Choose your access mode:
|
||||
• Localhost - Access only from this machine
|
||||
• Network - Access from your local network
|
||||
• Ngrok - Access from anywhere (requires auth token)
|
||||
3. Share the URL with others or access it yourself"#.to_string(),
|
||||
action: Some(TutorialAction {
|
||||
action_type: "start_server".to_string(),
|
||||
payload: HashMap::new(),
|
||||
}),
|
||||
completed: false,
|
||||
order: 3,
|
||||
},
|
||||
],
|
||||
},
|
||||
TutorialCategory {
|
||||
id: "advanced_features".to_string(),
|
||||
name: "Advanced Features".to_string(),
|
||||
description: "Discover powerful features".to_string(),
|
||||
icon: "⚡".to_string(),
|
||||
steps: vec![
|
||||
TutorialStep {
|
||||
id: "recording".to_string(),
|
||||
title: "Recording Sessions".to_string(),
|
||||
description: "Record and replay terminal sessions".to_string(),
|
||||
content: r#"Record your terminal sessions in Asciinema format:
|
||||
|
||||
1. Right-click on a session
|
||||
2. Select "Start Recording"
|
||||
3. Perform your terminal tasks
|
||||
4. Stop recording when done
|
||||
5. Save or share the recording
|
||||
|
||||
Recordings can be played back later or shared with others."#.to_string(),
|
||||
action: None,
|
||||
completed: false,
|
||||
order: 1,
|
||||
},
|
||||
TutorialStep {
|
||||
id: "port_forwarding".to_string(),
|
||||
title: "TTY Forwarding".to_string(),
|
||||
description: "Forward terminal sessions over TCP".to_string(),
|
||||
content: r#"TTY forwarding allows remote terminal access:
|
||||
|
||||
1. Go to Settings > Advanced
|
||||
2. Enable TTY Forwarding
|
||||
3. Configure the local port
|
||||
4. Connect using: telnet localhost <port>
|
||||
|
||||
This is useful for accessing terminals from other applications."#.to_string(),
|
||||
action: None,
|
||||
completed: false,
|
||||
order: 2,
|
||||
},
|
||||
TutorialStep {
|
||||
id: "cli_tool".to_string(),
|
||||
title: "Command Line Interface".to_string(),
|
||||
description: "Use VibeTunnel from the terminal".to_string(),
|
||||
content: r#"Install the CLI tool for quick access:
|
||||
|
||||
1. Go to Settings > Advanced
|
||||
2. Click "Install CLI Tool"
|
||||
3. Open a new terminal
|
||||
4. Run: vt --help
|
||||
|
||||
Common commands:
|
||||
• vt new - Create new session
|
||||
• vt list - List sessions
|
||||
• vt attach <id> - Attach to session"#.to_string(),
|
||||
action: Some(TutorialAction {
|
||||
action_type: "install_cli".to_string(),
|
||||
payload: HashMap::new(),
|
||||
}),
|
||||
completed: false,
|
||||
order: 3,
|
||||
},
|
||||
],
|
||||
},
|
||||
TutorialCategory {
|
||||
id: "security".to_string(),
|
||||
name: "Security & Settings".to_string(),
|
||||
description: "Configure security and preferences".to_string(),
|
||||
icon: "🔒".to_string(),
|
||||
steps: vec![
|
||||
TutorialStep {
|
||||
id: "password_protection".to_string(),
|
||||
title: "Password Protection".to_string(),
|
||||
description: "Secure your web interface".to_string(),
|
||||
content: r#"Protect your sessions with a password:
|
||||
|
||||
1. Go to Settings > Dashboard
|
||||
2. Enable "Password Protection"
|
||||
3. Set a strong password
|
||||
4. Save settings
|
||||
|
||||
Anyone accessing the web interface will need this password."#.to_string(),
|
||||
action: Some(TutorialAction {
|
||||
action_type: "open_settings".to_string(),
|
||||
payload: HashMap::new(),
|
||||
}),
|
||||
completed: false,
|
||||
order: 1,
|
||||
},
|
||||
TutorialStep {
|
||||
id: "auto_launch".to_string(),
|
||||
title: "Auto Launch".to_string(),
|
||||
description: "Start VibeTunnel with your system".to_string(),
|
||||
content: r#"Configure VibeTunnel to start automatically:
|
||||
|
||||
1. Go to Settings > General
|
||||
2. Enable "Launch at startup"
|
||||
3. Choose startup behavior:
|
||||
• Start minimized
|
||||
• Show dock icon
|
||||
• Auto-start server
|
||||
|
||||
VibeTunnel will be ready whenever you need it."#.to_string(),
|
||||
action: None,
|
||||
completed: false,
|
||||
order: 2,
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
/// Get tutorial progress
|
||||
pub async fn get_progress(&self) -> TutorialProgress {
|
||||
let state = self.state.read().await;
|
||||
let tutorials = self.tutorials.read().await;
|
||||
|
||||
let total_steps: usize = tutorials.iter()
|
||||
.map(|c| c.steps.len())
|
||||
.sum();
|
||||
|
||||
let completed_steps = state.completed_steps.len();
|
||||
let percentage = if total_steps > 0 {
|
||||
(completed_steps as f32 / total_steps as f32 * 100.0) as u32
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
TutorialProgress {
|
||||
total_steps,
|
||||
completed_steps,
|
||||
percentage,
|
||||
categories: tutorials.iter().map(|category| {
|
||||
let category_completed = category.steps.iter()
|
||||
.filter(|s| state.completed_steps.contains(&s.id))
|
||||
.count();
|
||||
|
||||
CategoryProgress {
|
||||
category_id: category.id.clone(),
|
||||
category_name: category.name.clone(),
|
||||
total_steps: category.steps.len(),
|
||||
completed_steps: category_completed,
|
||||
}
|
||||
}).collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Tutorial progress tracking
///
/// Aggregate progress across all tutorial categories.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TutorialProgress {
    /// Total number of steps across every category.
    pub total_steps: usize,
    /// Number of steps the user has completed.
    pub completed_steps: usize,
    /// Completion as an integer percentage (0–100; 0 when no steps exist).
    pub percentage: u32,
    /// Per-category completion breakdown.
    pub categories: Vec<CategoryProgress>,
}
|
||||
|
||||
/// Category progress
///
/// Completion counts for a single tutorial category.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CategoryProgress {
    /// Id of the category this entry describes.
    pub category_id: String,
    /// Display name of the category.
    pub category_name: String,
    /// Number of steps in the category.
    pub total_steps: usize,
    /// Number of those steps the user has completed.
    pub completed_steps: usize,
}
|
||||
|
|
@ -5,7 +5,7 @@
|
|||
"build": {
|
||||
"beforeDevCommand": "",
|
||||
"beforeBuildCommand": "",
|
||||
"frontendDist": "../dist"
|
||||
"frontendDist": "../public"
|
||||
},
|
||||
"app": {
|
||||
"windows": [{
|
||||
|
|
@ -39,7 +39,7 @@
|
|||
"icons/menu-bar-icon@2x.png",
|
||||
"icons/tray-icon.png",
|
||||
"icons/tray-icon@2x.png",
|
||||
"../public/**"
|
||||
"public/**"
|
||||
],
|
||||
"macOS": {
|
||||
"frameworks": [],
|
||||
|
|
|
|||
Loading…
Reference in a new issue