mirror of https://github.com/jetkvm/kvm.git
Compare commits
7 Commits
1a68f125e6
...
3e8ad04401
| Author | SHA1 | Date |
|---|---|---|
|
|
3e8ad04401 | |
|
|
37b1a8bf34 | |
|
|
ca8b06f4cf | |
|
|
33e099f258 | |
|
|
ea068414dc | |
|
|
3e2df4e651 | |
|
|
f8c2a95381 |
|
|
@ -301,13 +301,14 @@ export JETKVM_PROXY_URL="ws://<IP>"
|
||||||
|
|
||||||
### Performance Profiling
|
### Performance Profiling
|
||||||
|
|
||||||
```bash
|
1. Enable `Developer Mode` on your JetKVM device
|
||||||
# Enable profiling
|
2. Add a password on the `Access` tab
|
||||||
go build -o bin/jetkvm_app -ldflags="-X main.enableProfiling=true" cmd/main.go
|
|
||||||
|
|
||||||
|
```bash
|
||||||
# Access profiling
|
# Access profiling
|
||||||
curl http://<IP>:6060/debug/pprof/
|
curl http://api:$JETKVM_PASSWORD@YOUR_DEVICE_IP/developer/pprof/
|
||||||
```
|
```
|
||||||
|
|
||||||
### Advanced Environment Variables
|
### Advanced Environment Variables
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
|
|
||||||
5
Makefile
5
Makefile
|
|
@ -63,14 +63,17 @@ build_dev_test: build_test2json build_gotestsum
|
||||||
|
|
||||||
frontend:
|
frontend:
|
||||||
cd ui && npm ci && npm run build:device && \
|
cd ui && npm ci && npm run build:device && \
|
||||||
find ../static/assets \
|
find ../static/ \
|
||||||
-type f \
|
-type f \
|
||||||
\( -name '*.js' \
|
\( -name '*.js' \
|
||||||
-o -name '*.css' \
|
-o -name '*.css' \
|
||||||
|
-o -name '*.html' \
|
||||||
|
-o -name '*.ico' \
|
||||||
-o -name '*.png' \
|
-o -name '*.png' \
|
||||||
-o -name '*.jpg' \
|
-o -name '*.jpg' \
|
||||||
-o -name '*.jpeg' \
|
-o -name '*.jpeg' \
|
||||||
-o -name '*.gif' \
|
-o -name '*.gif' \
|
||||||
|
-o -name '*.svg' \
|
||||||
-o -name '*.webp' \
|
-o -name '*.webp' \
|
||||||
-o -name '*.woff2' \
|
-o -name '*.woff2' \
|
||||||
\) \
|
\) \
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,71 @@
|
||||||
|
package utils
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"slices"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/crypto/ssh"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ValidSSHKeyTypes is a list of valid SSH key types
|
||||||
|
//
|
||||||
|
// Please make sure that all the types in this list are supported by dropbear
|
||||||
|
// https://github.com/mkj/dropbear/blob/003c5fcaabc114430d5d14142e95ffdbbd2d19b6/src/signkey.c#L37
|
||||||
|
//
|
||||||
|
// ssh-dss is not allowed here as it's insecure
|
||||||
|
var ValidSSHKeyTypes = []string{
|
||||||
|
ssh.KeyAlgoRSA,
|
||||||
|
ssh.KeyAlgoED25519,
|
||||||
|
ssh.KeyAlgoECDSA256,
|
||||||
|
ssh.KeyAlgoECDSA384,
|
||||||
|
ssh.KeyAlgoECDSA521,
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateSSHKey validates authorized_keys file content
|
||||||
|
func ValidateSSHKey(sshKey string) error {
|
||||||
|
// validate SSH key
|
||||||
|
var (
|
||||||
|
hasValidPublicKey = false
|
||||||
|
lastError = fmt.Errorf("no valid SSH key found")
|
||||||
|
)
|
||||||
|
for _, key := range strings.Split(sshKey, "\n") {
|
||||||
|
key = strings.TrimSpace(key)
|
||||||
|
|
||||||
|
// skip empty lines and comments
|
||||||
|
if key == "" || strings.HasPrefix(key, "#") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
parsedPublicKey, _, _, _, err := ssh.ParseAuthorizedKey([]byte(key))
|
||||||
|
if err != nil {
|
||||||
|
lastError = err
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if parsedPublicKey == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
parsedType := parsedPublicKey.Type()
|
||||||
|
textType := strings.Fields(key)[0]
|
||||||
|
|
||||||
|
if parsedType != textType {
|
||||||
|
lastError = fmt.Errorf("parsed SSH key type %s does not match type in text %s", parsedType, textType)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if !slices.Contains(ValidSSHKeyTypes, parsedType) {
|
||||||
|
lastError = fmt.Errorf("invalid SSH key type: %s", parsedType)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
hasValidPublicKey = true
|
||||||
|
}
|
||||||
|
|
||||||
|
if !hasValidPublicKey {
|
||||||
|
return lastError
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,208 @@
|
||||||
|
package utils
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestValidateSSHKey(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
sshKey string
|
||||||
|
expectError bool
|
||||||
|
errorMsg string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "valid RSA key",
|
||||||
|
sshKey: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDiYUb9Fy2vlPfO+HwubnshimpVrWPoePyvyN+jPC5gWqZSycjMy6Is2vFVn7oQc72bkY0wZalspT5wUOwKtltSoLpL7vcqGL9zHVw4yjYXtPGIRd3zLpU9wdngevnepPQWTX3LvZTZfmOsrGoMDKIG+Lbmiq/STMuWYecIqMp7tUKRGS8vfAmpu6MsrN9/4UTcdWWXYWJQQn+2nCyMz28jYlWRsKtqFK6owrdZWt8WQnPN+9Upcf2ByQje+0NLnpNrnh+yd2ocuVW9wQYKAZXy7IaTfEJwd5m34sLwkqlZTaBBcmWJU+3RfpYXE763cf3rUoPIGQ8eUEBJ8IdM4vhp test@example.com",
|
||||||
|
expectError: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid ED25519 key",
|
||||||
|
sshKey: "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIBSbM8wuD5ab0nHsXaYOqaD3GLLUwmDzSk79Xi/N+H2j test@example.com",
|
||||||
|
expectError: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid ECDSA key",
|
||||||
|
sshKey: "ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBAlTkxIo4mXBR+gEX0Q74BpYX4bFFHoX+8Uz7tsob8HvsnMvsEE+BW9h9XrbWX4/4ppL/o6sHbvsqNr9HcyKfdc= test@example.com",
|
||||||
|
expectError: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "multiple valid keys",
|
||||||
|
sshKey: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDiYUb9Fy2vlPfO+HwubnshimpVrWPoePyvyN+jPC5gWqZSycjMy6Is2vFVn7oQc72bkY0wZalspT5wUOwKtltSoLpL7vcqGL9zHVw4yjYXtPGIRd3zLpU9wdngevnepPQWTX3LvZTZfmOsrGoMDKIG+Lbmiq/STMuWYecIqMp7tUKRGS8vfAmpu6MsrN9/4UTcdWWXYWJQQn+2nCyMz28jYlWRsKtqFK6owrdZWt8WQnPN+9Upcf2ByQje+0NLnpNrnh+yd2ocuVW9wQYKAZXy7IaTfEJwd5m34sLwkqlZTaBBcmWJU+3RfpYXE763cf3rUoPIGQ8eUEBJ8IdM4vhp test@example.com\nssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIBSbM8wuD5ab0nHsXaYOqaD3GLLUwmDzSk79Xi/N+H2j test@example.com",
|
||||||
|
expectError: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid key with comment",
|
||||||
|
sshKey: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDiYUb9Fy2vlPfO+HwubnshimpVrWPoePyvyN+jPC5gWqZSycjMy6Is2vFVn7oQc72bkY0wZalspT5wUOwKtltSoLpL7vcqGL9zHVw4yjYXtPGIRd3zLpU9wdngevnepPQWTX3LvZTZfmOsrGoMDKIG+Lbmiq/STMuWYecIqMp7tUKRGS8vfAmpu6MsrN9/4UTcdWWXYWJQQn+2nCyMz28jYlWRsKtqFK6owrdZWt8WQnPN+9Upcf2ByQje+0NLnpNrnh+yd2ocuVW9wQYKAZXy7IaTfEJwd5m34sLwkqlZTaBBcmWJU+3RfpYXE763cf3rUoPIGQ8eUEBJ8IdM4vhp user@example.com",
|
||||||
|
expectError: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid key with options and comment (we don't support options yet)",
|
||||||
|
sshKey: "command=\"echo hello\" ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDiYUb9Fy2vlPfO+HwubnshimpVrWPoePyvyN+jPC5gWqZSycjMy6Is2vFVn7oQc72bkY0wZalspT5wUOwKtltSoLpL7vcqGL9zHVw4yjYXtPGIRd3zLpU9wdngevnepPQWTX3LvZTZfmOsrGoMDKIG+Lbmiq/STMuWYecIqMp7tUKRGS8vfAmpu6MsrN9/4UTcdWWXYWJQQn+2nCyMz28jYlWRsKtqFK6owrdZWt8WQnPN+9Upcf2ByQje+0NLnpNrnh+yd2ocuVW9wQYKAZXy7IaTfEJwd5m34sLwkqlZTaBBcmWJU+3RfpYXE763cf3rUoPIGQ8eUEBJ8IdM4vhp user@example.com",
|
||||||
|
expectError: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty string",
|
||||||
|
sshKey: "",
|
||||||
|
expectError: true,
|
||||||
|
errorMsg: "no valid SSH key found",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "whitespace only",
|
||||||
|
sshKey: " \n\t \n ",
|
||||||
|
expectError: true,
|
||||||
|
errorMsg: "no valid SSH key found",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "comment only",
|
||||||
|
sshKey: "# This is a comment\n# Another comment",
|
||||||
|
expectError: true,
|
||||||
|
errorMsg: "no valid SSH key found",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid key format",
|
||||||
|
sshKey: "not-a-valid-ssh-key",
|
||||||
|
expectError: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid key type",
|
||||||
|
sshKey: "ssh-dss AAAAB3NzaC1kc3MAAACBAOeB...",
|
||||||
|
expectError: true,
|
||||||
|
errorMsg: "invalid SSH key type: ssh-dss",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "unsupported key type",
|
||||||
|
sshKey: "ssh-rsa-cert-v01@openssh.com AAAAB3NzaC1yc2EAAAADAQABAAABgQC7vbqajDhA...",
|
||||||
|
expectError: true,
|
||||||
|
errorMsg: "invalid SSH key type: ssh-rsa-cert-v01@openssh.com",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "malformed key data",
|
||||||
|
sshKey: "ssh-rsa invalid-base64-data",
|
||||||
|
expectError: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "type mismatch",
|
||||||
|
sshKey: "ssh-rsa AAAAC3NzaC1lZDI1NTE5AAAAIGomKoH...",
|
||||||
|
expectError: true,
|
||||||
|
errorMsg: "parsed SSH key type ssh-ed25519 does not match type in text ssh-rsa",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "mixed valid and invalid keys",
|
||||||
|
sshKey: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDiYUb9Fy2vlPfO+HwubnshimpVrWPoePyvyN+jPC5gWqZSycjMy6Is2vFVn7oQc72bkY0wZalspT5wUOwKtltSoLpL7vcqGL9zHVw4yjYXtPGIRd3zLpU9wdngevnepPQWTX3LvZTZfmOsrGoMDKIG+Lbmiq/STMuWYecIqMp7tUKRGS8vfAmpu6MsrN9/4UTcdWWXYWJQQn+2nCyMz28jYlWRsKtqFK6owrdZWt8WQnPN+9Upcf2ByQje+0NLnpNrnh+yd2ocuVW9wQYKAZXy7IaTfEJwd5m34sLwkqlZTaBBcmWJU+3RfpYXE763cf3rUoPIGQ8eUEBJ8IdM4vhp test@example.com\ninvalid-key\nssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIBSbM8wuD5ab0nHsXaYOqaD3GLLUwmDzSk79Xi/N+H2j test@example.com",
|
||||||
|
expectError: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid key with empty lines and comments",
|
||||||
|
sshKey: "# Comment line\n\nssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDiYUb9Fy2vlPfO+HwubnshimpVrWPoePyvyN+jPC5gWqZSycjMy6Is2vFVn7oQc72bkY0wZalspT5wUOwKtltSoLpL7vcqGL9zHVw4yjYXtPGIRd3zLpU9wdngevnepPQWTX3LvZTZfmOsrGoMDKIG+Lbmiq/STMuWYecIqMp7tUKRGS8vfAmpu6MsrN9/4UTcdWWXYWJQQn+2nCyMz28jYlWRsKtqFK6owrdZWt8WQnPN+9Upcf2ByQje+0NLnpNrnh+yd2ocuVW9wQYKAZXy7IaTfEJwd5m34sLwkqlZTaBBcmWJU+3RfpYXE763cf3rUoPIGQ8eUEBJ8IdM4vhp test@example.com\n# Another comment\n\t\n",
|
||||||
|
expectError: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "all invalid keys",
|
||||||
|
sshKey: "invalid-key-1\ninvalid-key-2\nssh-dss AAAAB3NzaC1kc3MAAACBAOeB...",
|
||||||
|
expectError: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
err := ValidateSSHKey(tt.sshKey)
|
||||||
|
|
||||||
|
if tt.expectError {
|
||||||
|
if err == nil {
|
||||||
|
t.Errorf("ValidateSSHKey() expected error but got none")
|
||||||
|
} else if tt.errorMsg != "" && !strings.ContainsAny(err.Error(), tt.errorMsg) {
|
||||||
|
t.Errorf("ValidateSSHKey() error = %v, expected to contain %v", err, tt.errorMsg)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("ValidateSSHKey() unexpected error = %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidSSHKeyTypes(t *testing.T) {
|
||||||
|
expectedTypes := []string{
|
||||||
|
"ssh-rsa",
|
||||||
|
"ssh-ed25519",
|
||||||
|
"ecdsa-sha2-nistp256",
|
||||||
|
"ecdsa-sha2-nistp384",
|
||||||
|
"ecdsa-sha2-nistp521",
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(ValidSSHKeyTypes) != len(expectedTypes) {
|
||||||
|
t.Errorf("ValidSSHKeyTypes length = %d, expected %d", len(ValidSSHKeyTypes), len(expectedTypes))
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, expectedType := range expectedTypes {
|
||||||
|
found := false
|
||||||
|
for _, actualType := range ValidSSHKeyTypes {
|
||||||
|
if actualType == expectedType {
|
||||||
|
found = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !found {
|
||||||
|
t.Errorf("ValidSSHKeyTypes missing expected type: %s", expectedType)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestValidateSSHKeyEdgeCases tests edge cases and boundary conditions
|
||||||
|
func TestValidateSSHKeyEdgeCases(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
sshKey string
|
||||||
|
expectError bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "key with only type",
|
||||||
|
sshKey: "ssh-rsa",
|
||||||
|
expectError: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "key with type and empty data",
|
||||||
|
sshKey: "ssh-rsa ",
|
||||||
|
expectError: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "key with type and whitespace data",
|
||||||
|
sshKey: "ssh-rsa \t ",
|
||||||
|
expectError: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "key with multiple spaces between type and data",
|
||||||
|
sshKey: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDiYUb9Fy2vlPfO+HwubnshimpVrWPoePyvyN+jPC5gWqZSycjMy6Is2vFVn7oQc72bkY0wZalspT5wUOwKtltSoLpL7vcqGL9zHVw4yjYXtPGIRd3zLpU9wdngevnepPQWTX3LvZTZfmOsrGoMDKIG+Lbmiq/STMuWYecIqMp7tUKRGS8vfAmpu6MsrN9/4UTcdWWXYWJQQn+2nCyMz28jYlWRsKtqFK6owrdZWt8WQnPN+9Upcf2ByQje+0NLnpNrnh+yd2ocuVW9wQYKAZXy7IaTfEJwd5m34sLwkqlZTaBBcmWJU+3RfpYXE763cf3rUoPIGQ8eUEBJ8IdM4vhp test@example.com",
|
||||||
|
expectError: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "key with tabs",
|
||||||
|
sshKey: "\tssh-rsa\tAAAAB3NzaC1yc2EAAAADAQABAAABAQDiYUb9Fy2vlPfO+HwubnshimpVrWPoePyvyN+jPC5gWqZSycjMy6Is2vFVn7oQc72bkY0wZalspT5wUOwKtltSoLpL7vcqGL9zHVw4yjYXtPGIRd3zLpU9wdngevnepPQWTX3LvZTZfmOsrGoMDKIG+Lbmiq/STMuWYecIqMp7tUKRGS8vfAmpu6MsrN9/4UTcdWWXYWJQQn+2nCyMz28jYlWRsKtqFK6owrdZWt8WQnPN+9Upcf2ByQje+0NLnpNrnh+yd2ocuVW9wQYKAZXy7IaTfEJwd5m34sLwkqlZTaBBcmWJU+3RfpYXE763cf3rUoPIGQ8eUEBJ8IdM4vhp test@example.com",
|
||||||
|
expectError: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "very long line",
|
||||||
|
sshKey: "ssh-rsa " + string(make([]byte, 10000)),
|
||||||
|
expectError: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
err := ValidateSSHKey(tt.sshKey)
|
||||||
|
|
||||||
|
if tt.expectError {
|
||||||
|
if err == nil {
|
||||||
|
t.Errorf("ValidateSSHKey() expected error but got none")
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("ValidateSSHKey() unexpected error = %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
47
jsonrpc.go
47
jsonrpc.go
|
|
@ -17,6 +17,7 @@ import (
|
||||||
"go.bug.st/serial"
|
"go.bug.st/serial"
|
||||||
|
|
||||||
"github.com/jetkvm/kvm/internal/usbgadget"
|
"github.com/jetkvm/kvm/internal/usbgadget"
|
||||||
|
"github.com/jetkvm/kvm/internal/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
type JSONRPCRequest struct {
|
type JSONRPCRequest struct {
|
||||||
|
|
@ -429,21 +430,27 @@ func rpcGetSSHKeyState() (string, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func rpcSetSSHKeyState(sshKey string) error {
|
func rpcSetSSHKeyState(sshKey string) error {
|
||||||
if sshKey != "" {
|
if sshKey == "" {
|
||||||
// Create directory if it doesn't exist
|
|
||||||
if err := os.MkdirAll(sshKeyDir, 0700); err != nil {
|
|
||||||
return fmt.Errorf("failed to create SSH key directory: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Write SSH key to file
|
|
||||||
if err := os.WriteFile(sshKeyFile, []byte(sshKey), 0600); err != nil {
|
|
||||||
return fmt.Errorf("failed to write SSH key: %w", err)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// Remove SSH key file if empty string is provided
|
// Remove SSH key file if empty string is provided
|
||||||
if err := os.Remove(sshKeyFile); err != nil && !os.IsNotExist(err) {
|
if err := os.Remove(sshKeyFile); err != nil && !os.IsNotExist(err) {
|
||||||
return fmt.Errorf("failed to remove SSH key file: %w", err)
|
return fmt.Errorf("failed to remove SSH key file: %w", err)
|
||||||
}
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate SSH key
|
||||||
|
if err := utils.ValidateSSHKey(sshKey); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create directory if it doesn't exist
|
||||||
|
if err := os.MkdirAll(sshKeyDir, 0700); err != nil {
|
||||||
|
return fmt.Errorf("failed to create SSH key directory: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write SSH key to file
|
||||||
|
if err := os.WriteFile(sshKeyFile, []byte(sshKey), 0600); err != nil {
|
||||||
|
return fmt.Errorf("failed to write SSH key: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
|
@ -1049,6 +1056,24 @@ func rpcSetLocalLoopbackOnly(enabled bool) error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// JSONRPCHandler represents a JSON-RPC handler
|
||||||
|
type JSONRPCHandler struct {
|
||||||
|
Type reflect.Type
|
||||||
|
Params []string
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetJSONRPCHandlers returns the JSON-RPC handlers
|
||||||
|
func GetJSONRPCHandlers() map[string]JSONRPCHandler {
|
||||||
|
ret := make(map[string]JSONRPCHandler)
|
||||||
|
for name, handler := range rpcHandlers {
|
||||||
|
ret[name] = JSONRPCHandler{
|
||||||
|
Type: reflect.ValueOf(handler.Func).Type(),
|
||||||
|
Params: handler.Params,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
var rpcHandlers = map[string]RPCHandler{
|
var rpcHandlers = map[string]RPCHandler{
|
||||||
"ping": {Func: rpcPing},
|
"ping": {Func: rpcPing},
|
||||||
"reboot": {Func: rpcReboot, Params: []string{"force"}},
|
"reboot": {Func: rpcReboot, Params: []string{"force"}},
|
||||||
|
|
|
||||||
Binary file not shown.
|
|
@ -0,0 +1,334 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"go/ast"
|
||||||
|
"go/parser"
|
||||||
|
"go/token"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/rs/zerolog/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
// getStringAliasInfoWithReflection uses reflection to automatically detect constants
|
||||||
|
// This approach tries to find constants by examining the actual values
|
||||||
|
func getStringAliasInfoWithReflection(searchPath string) []StringAliasInfo {
|
||||||
|
log.Debug().Str("searchPath", searchPath).Msg("Detecting string aliases and constants in single pass")
|
||||||
|
|
||||||
|
// Detect both string aliases and their constants in a single file system walk
|
||||||
|
result := detectStringAliasesWithConstants(searchPath)
|
||||||
|
|
||||||
|
// If reflection didn't work, throw an error
|
||||||
|
if len(result) == 0 {
|
||||||
|
log.Fatal().Msg("No string aliases with constants could be detected. Make sure the types are defined with constants in Go files.")
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Debug().Int("detected", len(result)).Msg("String alias detection completed")
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// detectStringAliasesWithConstants detects both string aliases and their constants in a single file system walk
|
||||||
|
func detectStringAliasesWithConstants(searchPath string) []StringAliasInfo {
|
||||||
|
var result []StringAliasInfo
|
||||||
|
|
||||||
|
// Walk the specified directory to find Go files
|
||||||
|
err := filepath.Walk(searchPath, func(path string, info os.FileInfo, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip directories and non-Go files
|
||||||
|
if info.IsDir() || !strings.HasSuffix(path, ".go") {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip test files and our own tool files
|
||||||
|
if strings.Contains(path, "_test.go") || strings.Contains(path, "scripts/jsonrpc_typings") {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the file to find both string aliases and their constants
|
||||||
|
aliases := findStringAliasesWithConstantsInFile(path)
|
||||||
|
result = append(result, aliases...)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal().Err(err).Msg("Error walking directory for string alias detection")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove duplicates based on type name
|
||||||
|
uniqueAliases := make([]StringAliasInfo, 0)
|
||||||
|
seen := make(map[string]bool)
|
||||||
|
for _, alias := range result {
|
||||||
|
if !seen[alias.Name] {
|
||||||
|
seen[alias.Name] = true
|
||||||
|
uniqueAliases = append(uniqueAliases, alias)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return uniqueAliases
|
||||||
|
}
|
||||||
|
|
||||||
|
// findStringAliasesWithConstantsInFile finds both string aliases and their constants in a single Go file
|
||||||
|
func findStringAliasesWithConstantsInFile(filePath string) []StringAliasInfo {
|
||||||
|
var result []StringAliasInfo
|
||||||
|
|
||||||
|
// Parse the Go file
|
||||||
|
fset := token.NewFileSet()
|
||||||
|
node, err := parser.ParseFile(fset, filePath, nil, parser.ParseComments)
|
||||||
|
if err != nil {
|
||||||
|
log.Debug().Err(err).Str("file", filePath).Msg("Failed to parse file")
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// First pass: collect all string alias type names
|
||||||
|
stringAliases := make(map[string]bool)
|
||||||
|
ast.Inspect(node, func(n ast.Node) bool {
|
||||||
|
genDecl, ok := n.(*ast.GenDecl)
|
||||||
|
if !ok || genDecl.Tok != token.TYPE {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, spec := range genDecl.Specs {
|
||||||
|
typeSpec, ok := spec.(*ast.TypeSpec)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this is a string alias (type Name string)
|
||||||
|
if ident, ok := typeSpec.Type.(*ast.Ident); ok && ident.Name == "string" {
|
||||||
|
stringAliases[typeSpec.Name.Name] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
|
||||||
|
// Second pass: find constants for the string aliases we found
|
||||||
|
ast.Inspect(node, func(n ast.Node) bool {
|
||||||
|
genDecl, ok := n.(*ast.GenDecl)
|
||||||
|
if !ok || genDecl.Tok != token.CONST {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process each constant specification in the declaration
|
||||||
|
for _, spec := range genDecl.Specs {
|
||||||
|
valueSpec, ok := spec.(*ast.ValueSpec)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this constant is typed with one of our string aliases
|
||||||
|
if valueSpec.Type == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
ident, ok := valueSpec.Type.(*ast.Ident)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
typeName := ident.Name
|
||||||
|
|
||||||
|
// Check if this type is one of our string aliases
|
||||||
|
if _, ok := stringAliases[typeName]; !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract string literal values
|
||||||
|
for _, value := range valueSpec.Values {
|
||||||
|
basicLit, ok := value.(*ast.BasicLit)
|
||||||
|
if !ok || basicLit.Kind != token.STRING {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove quotes from string literal
|
||||||
|
constantValue := strings.Trim(basicLit.Value, "\"")
|
||||||
|
|
||||||
|
// Find or create the StringAliasInfo for this type
|
||||||
|
var aliasInfo *StringAliasInfo
|
||||||
|
for i := range result {
|
||||||
|
if result[i].Name == typeName {
|
||||||
|
aliasInfo = &result[i]
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if aliasInfo == nil {
|
||||||
|
result = append(result, StringAliasInfo{
|
||||||
|
Name: typeName,
|
||||||
|
Constants: []string{},
|
||||||
|
})
|
||||||
|
aliasInfo = &result[len(result)-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
aliasInfo.Constants = append(aliasInfo.Constants, constantValue)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// batchDetectConstantsForTypes efficiently detects constants for multiple types in a single file system walk
|
||||||
|
func batchDetectConstantsForTypes(typeNames []string, searchPath string) map[string][]string {
|
||||||
|
result := make(map[string][]string)
|
||||||
|
|
||||||
|
// Initialize result map
|
||||||
|
for _, typeName := range typeNames {
|
||||||
|
result[typeName] = []string{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Walk the specified directory to find Go files
|
||||||
|
err := filepath.Walk(searchPath, func(path string, info os.FileInfo, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip directories and non-Go files
|
||||||
|
if info.IsDir() || !strings.HasSuffix(path, ".go") {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip test files and our own tool files
|
||||||
|
if strings.Contains(path, "_test.go") || strings.Contains(path, "scripts/jsonrpc_typings") {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this file contains any of the types we're looking for
|
||||||
|
fileContainsAnyType := false
|
||||||
|
for _, typeName := range typeNames {
|
||||||
|
if fileContainsType(path, typeName) {
|
||||||
|
fileContainsAnyType = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !fileContainsAnyType {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Debug().Str("file", path).Strs("types", typeNames).Msg("Parsing file for constants")
|
||||||
|
|
||||||
|
// Parse constants for all types from this file
|
||||||
|
fileConstants := batchParseConstantsFromFile(path, typeNames)
|
||||||
|
|
||||||
|
// Merge results
|
||||||
|
for typeName, constants := range fileConstants {
|
||||||
|
if len(constants) > 0 {
|
||||||
|
result[typeName] = constants
|
||||||
|
log.Debug().Str("type", typeName).Strs("constants", constants).Str("file", path).Msg("Found constants")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal().Err(err).Msg("Error searching for constants")
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// batchParseConstantsFromFile parses constants for multiple types from a single Go file
|
||||||
|
func batchParseConstantsFromFile(filePath string, typeNames []string) map[string][]string {
|
||||||
|
result := make(map[string][]string)
|
||||||
|
|
||||||
|
// Initialize result map
|
||||||
|
for _, typeName := range typeNames {
|
||||||
|
result[typeName] = []string{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the Go file
|
||||||
|
fset := token.NewFileSet()
|
||||||
|
node, err := parser.ParseFile(fset, filePath, nil, parser.ParseComments)
|
||||||
|
if err != nil {
|
||||||
|
log.Debug().Err(err).Str("file", filePath).Msg("Failed to parse file")
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// Walk the AST to find constant declarations
|
||||||
|
ast.Inspect(node, func(n ast.Node) bool {
|
||||||
|
// Look for GenDecl nodes (const declarations)
|
||||||
|
genDecl, ok := n.(*ast.GenDecl)
|
||||||
|
if !ok || genDecl.Tok != token.CONST {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process each constant specification in the declaration
|
||||||
|
for _, spec := range genDecl.Specs {
|
||||||
|
valueSpec, ok := spec.(*ast.ValueSpec)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this constant is typed with one of our target types
|
||||||
|
if valueSpec.Type != nil {
|
||||||
|
if ident, ok := valueSpec.Type.(*ast.Ident); ok {
|
||||||
|
typeName := ident.Name
|
||||||
|
|
||||||
|
// Check if this type is one we're looking for
|
||||||
|
if contains(typeNames, typeName) {
|
||||||
|
// Extract string literal values
|
||||||
|
for _, value := range valueSpec.Values {
|
||||||
|
if basicLit, ok := value.(*ast.BasicLit); ok && basicLit.Kind == token.STRING {
|
||||||
|
// Remove quotes from string literal
|
||||||
|
constantValue := strings.Trim(basicLit.Value, "\"")
|
||||||
|
result[typeName] = append(result[typeName], constantValue)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// contains checks if a slice contains a string
|
||||||
|
func contains(slice []string, item string) bool {
|
||||||
|
for _, s := range slice {
|
||||||
|
if s == item {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// fileContainsType checks if a Go file contains a type definition for the given type name
|
||||||
|
func fileContainsType(filePath, typeName string) bool {
|
||||||
|
fset := token.NewFileSet()
|
||||||
|
node, err := parser.ParseFile(fset, filePath, nil, parser.ParseComments)
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Walk the AST to find type definitions
|
||||||
|
found := false
|
||||||
|
ast.Inspect(node, func(n ast.Node) bool {
|
||||||
|
switch x := n.(type) {
|
||||||
|
case *ast.GenDecl:
|
||||||
|
if x.Tok == token.TYPE {
|
||||||
|
for _, spec := range x.Specs {
|
||||||
|
if typeSpec, ok := spec.(*ast.TypeSpec); ok {
|
||||||
|
if typeSpec.Name.Name == typeName {
|
||||||
|
found = true
|
||||||
|
return false // Stop searching
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return !found // Continue searching if not found yet
|
||||||
|
})
|
||||||
|
|
||||||
|
return found
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,45 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/rs/zerolog"
|
||||||
|
"github.com/rs/zerolog/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Parse command line flags
|
||||||
|
logLevel := flag.String("log-level", "info", "Log level (trace, debug, info, warn, error, fatal, panic)")
|
||||||
|
searchPath := flag.String("search-path", ".", "Path to search for Go files containing type definitions")
|
||||||
|
outputPath := flag.String("output", "jsonrpc.ts", "Output path for the generated TypeScript file")
|
||||||
|
flag.Parse()
|
||||||
|
|
||||||
|
// Set log level
|
||||||
|
level, err := zerolog.ParseLevel(strings.ToLower(*logLevel))
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal().Err(err).Str("level", *logLevel).Msg("Invalid log level")
|
||||||
|
}
|
||||||
|
zerolog.SetGlobalLevel(level)
|
||||||
|
|
||||||
|
// Configure zerolog for pretty console output
|
||||||
|
log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr})
|
||||||
|
|
||||||
|
// Create API schema
|
||||||
|
log.Info().Str("search-path", *searchPath).Msg("Creating API schema from JSON-RPC handlers")
|
||||||
|
schema := NewAPISchema(*searchPath)
|
||||||
|
|
||||||
|
// Generate TypeScript typings
|
||||||
|
log.Info().Msg("Generating TypeScript typings")
|
||||||
|
typings := generateTypeScriptTypings(schema, *searchPath)
|
||||||
|
|
||||||
|
// Write to output file
|
||||||
|
log.Info().Str("file", *outputPath).Msg("Writing TypeScript definitions to file")
|
||||||
|
err = os.WriteFile(*outputPath, []byte(typings), 0644)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal().Err(err).Str("file", *outputPath).Msg("Failed to write TypeScript definitions")
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Info().Str("file", *outputPath).Msg("TypeScript typings generated successfully")
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,433 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/jetkvm/kvm"
|
||||||
|
"github.com/rs/zerolog/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NewAPISchema creates a new API schema from the JSON-RPC handlers
|
||||||
|
func NewAPISchema(searchPath string) *APISchema {
|
||||||
|
schema := &APISchema{
|
||||||
|
Handlers: make(map[string]APIHandler),
|
||||||
|
Types: make(map[string]APIType),
|
||||||
|
}
|
||||||
|
|
||||||
|
handlers := kvm.GetJSONRPCHandlers()
|
||||||
|
log.Info().Int("count", len(handlers)).Msg("Processing JSON-RPC handlers")
|
||||||
|
|
||||||
|
for name, handler := range handlers {
|
||||||
|
log.Debug().Str("handler", name).Msg("Building API handler")
|
||||||
|
apiHandler := buildAPIHandler(name, handler, schema)
|
||||||
|
schema.Handlers[name] = apiHandler
|
||||||
|
}
|
||||||
|
|
||||||
|
schema.HandlerCount = len(schema.Handlers)
|
||||||
|
schema.TypeCount = len(schema.Types)
|
||||||
|
|
||||||
|
log.Info().
|
||||||
|
Int("handlers", schema.HandlerCount).
|
||||||
|
Int("types", schema.TypeCount).
|
||||||
|
Msg("API schema created successfully")
|
||||||
|
|
||||||
|
return schema
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildAPIHandler constructs an APIHandler from a JSON-RPC handler
|
||||||
|
func buildAPIHandler(name string, handler kvm.JSONRPCHandler, schema *APISchema) APIHandler {
|
||||||
|
apiHandler := APIHandler{
|
||||||
|
Name: name,
|
||||||
|
FunctionType: handler.Type.String(),
|
||||||
|
ParameterNames: handler.Params,
|
||||||
|
Parameters: make([]APIParameter, 0, handler.Type.NumIn()),
|
||||||
|
ReturnValues: make([]APIReturnValue, 0, handler.Type.NumOut()),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process parameters
|
||||||
|
for i := 0; i < handler.Type.NumIn(); i++ {
|
||||||
|
paramType := handler.Type.In(i)
|
||||||
|
paramName := getParameterName(i, handler.Params)
|
||||||
|
|
||||||
|
apiParam := APIParameter{
|
||||||
|
Name: paramName,
|
||||||
|
Type: paramType.String(),
|
||||||
|
}
|
||||||
|
|
||||||
|
if apiType := extractAPIType(paramType, schema); apiType != nil {
|
||||||
|
apiParam.APIType = apiType
|
||||||
|
schema.Types[apiType.Name] = *apiType
|
||||||
|
}
|
||||||
|
|
||||||
|
apiHandler.Parameters = append(apiHandler.Parameters, apiParam)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process return values
|
||||||
|
for i := 0; i < handler.Type.NumOut(); i++ {
|
||||||
|
returnType := handler.Type.Out(i)
|
||||||
|
|
||||||
|
apiReturn := APIReturnValue{
|
||||||
|
Index: i,
|
||||||
|
Type: returnType.String(),
|
||||||
|
}
|
||||||
|
|
||||||
|
if apiType := extractAPIType(returnType, schema); apiType != nil {
|
||||||
|
apiReturn.APIType = apiType
|
||||||
|
schema.Types[apiType.Name] = *apiType
|
||||||
|
}
|
||||||
|
|
||||||
|
apiHandler.ReturnValues = append(apiHandler.ReturnValues, apiReturn)
|
||||||
|
}
|
||||||
|
|
||||||
|
return apiHandler
|
||||||
|
}
|
||||||
|
|
||||||
|
// getParameterName returns the declared name for the parameter at
// index, or "" when the handler declares fewer names than parameters.
func getParameterName(index int, paramNames []string) string {
	if index >= len(paramNames) {
		return ""
	}
	return paramNames[index]
}
|
||||||
|
|
||||||
|
// extractAPIType extracts API type information from a reflect.Type
// It recursively finds and adds nested struct types to the schema.
// Returns nil when the type carries no schema information (nil input,
// an unrecognized basic type, or a skipped null.* wrapper struct).
func extractAPIType(t reflect.Type, schema *APISchema) *APIType {
	if t == nil {
		return nil
	}

	// Dispatch on the reflect kind; each helper handles one shape and
	// recurses back into extractAPIType for element/key/value types.
	switch t.Kind() {
	case reflect.Ptr:
		return extractPointerType(t, schema)
	case reflect.Slice:
		return extractSliceType(t, schema)
	case reflect.Array:
		return extractArrayType(t, schema)
	case reflect.Map:
		return extractMapType(t, schema)
	case reflect.Struct:
		return extractStructType(t, schema)
	case reflect.Interface:
		return extractInterfaceType(t)
	default:
		// Everything else (ints, strings, bools, ...) is a basic type.
		return extractBasicType(t)
	}
}
|
||||||
|
|
||||||
|
// extractPointerType handles pointer types
|
||||||
|
func extractPointerType(t reflect.Type, schema *APISchema) *APIType {
|
||||||
|
elemType := extractAPIType(t.Elem(), schema)
|
||||||
|
if elemType == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
elemType.IsPointer = true
|
||||||
|
elemType.Name = "*" + elemType.Name
|
||||||
|
return elemType
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractSliceType handles slice types
|
||||||
|
func extractSliceType(t reflect.Type, schema *APISchema) *APIType {
|
||||||
|
elemType := extractAPIType(t.Elem(), schema)
|
||||||
|
if elemType == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
elemType.IsSlice = true
|
||||||
|
elemType.SliceType = elemType.Name
|
||||||
|
elemType.Name = "[]" + elemType.Name
|
||||||
|
return elemType
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractArrayType handles array types
|
||||||
|
func extractArrayType(t reflect.Type, schema *APISchema) *APIType {
|
||||||
|
elemType := extractAPIType(t.Elem(), schema)
|
||||||
|
if elemType == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
elemType.Name = fmt.Sprintf("[%d]%s", t.Len(), elemType.Name)
|
||||||
|
return elemType
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractMapType handles map types
|
||||||
|
func extractMapType(t reflect.Type, schema *APISchema) *APIType {
|
||||||
|
keyType := extractAPIType(t.Key(), schema)
|
||||||
|
valueType := extractAPIType(t.Elem(), schema)
|
||||||
|
|
||||||
|
if keyType == nil || valueType == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return &APIType{
|
||||||
|
Name: fmt.Sprintf("map[%s]%s", keyType.Name, valueType.Name),
|
||||||
|
Kind: TypeKindMap,
|
||||||
|
IsMap: true,
|
||||||
|
MapKeyType: keyType.Name,
|
||||||
|
MapValueType: valueType.Name,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractStructType handles struct types.
// It records every exported field, recursively registers any nested
// struct types it encounters in schema.Types, and collapses a struct
// that consists of exactly one embedded struct (and nothing else) into
// a TypeKindExtension so the TypeScript template can emit "extends".
func extractStructType(t reflect.Type, schema *APISchema) *APIType {
	// Skip null.* structs as they are handled as optional properties
	// (see isOptionalType in the typings generator).
	if strings.HasPrefix(t.String(), "null.") {
		return nil
	}

	apiType := &APIType{
		Name:   t.String(),
		Kind:   TypeKindStruct,
		Fields: make([]APIField, 0, t.NumField()),
	}

	// Extract package name
	if pkgPath := t.PkgPath(); pkgPath != "" {
		apiType.Package = extractPackageName(pkgPath)
	}

	// Extract fields. Embedded structs are tracked separately so the
	// extension case below can be detected.
	embeddedStructs := make([]string, 0)
	regularFields := make([]APIField, 0)

	for i := 0; i < t.NumField(); i++ {
		field := t.Field(i)

		// Skip unexported fields
		if !field.IsExported() {
			continue
		}

		// Check if this is an embedded struct (anonymous field)
		if field.Anonymous && field.Type.Kind() == reflect.Struct {
			embeddedStructs = append(embeddedStructs, field.Type.String())
			// Recursively extract nested struct types from embedded structs
			if nestedType := extractAPIType(field.Type, schema); nestedType != nil {
				if nestedType.Kind == TypeKindStruct {
					schema.Types[nestedType.Name] = *nestedType
				}
			}
		} else {
			apiField := buildAPIField(field)
			regularFields = append(regularFields, apiField)

			// Recursively extract nested struct types from field types
			if nestedType := extractAPIType(field.Type, schema); nestedType != nil {
				if nestedType.Kind == TypeKindStruct {
					schema.Types[nestedType.Name] = *nestedType
				}
			}
		}
	}

	// If we have exactly one embedded struct and no regular fields, mark it as an extension
	if len(embeddedStructs) == 1 && len(regularFields) == 0 {
		apiType.Kind = TypeKindExtension
		apiType.Extends = embeddedStructs[0]
	} else {
		// NOTE(review): with multiple embedded structs plus regular
		// fields, the embedded fields are dropped from the output —
		// presumably acceptable for current handlers; verify if new
		// mixed-embedding types are added.
		apiType.Fields = regularFields
	}

	return apiType
}
|
||||||
|
|
||||||
|
// extractInterfaceType handles interface types
|
||||||
|
func extractInterfaceType(t reflect.Type) *APIType {
|
||||||
|
return &APIType{
|
||||||
|
Name: t.String(),
|
||||||
|
Kind: TypeKindInterface,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractBasicType handles basic Go types
|
||||||
|
func extractBasicType(t reflect.Type) *APIType {
|
||||||
|
if isBasicType(t.String()) {
|
||||||
|
return &APIType{
|
||||||
|
Name: t.String(),
|
||||||
|
Kind: TypeKindBasic,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildAPIField constructs an APIField from a reflect.StructField
|
||||||
|
func buildAPIField(field reflect.StructField) APIField {
|
||||||
|
apiField := APIField{
|
||||||
|
Name: field.Name,
|
||||||
|
JSONName: field.Name, // Default to field name
|
||||||
|
Type: field.Type.String(),
|
||||||
|
IsExported: field.IsExported(),
|
||||||
|
Tag: string(field.Tag),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse JSON tag
|
||||||
|
if jsonTag := field.Tag.Get("json"); jsonTag != "" {
|
||||||
|
if jsonName := parseJSONTag(jsonTag); jsonName != "" {
|
||||||
|
apiField.JSONName = jsonName
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return apiField
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseJSONTag extracts the JSON field name from a json struct tag.
// It returns "" when the tag names no field — an empty name or "-"
// (excluded from encoding) — so the caller keeps its default.
func parseJSONTag(jsonTag string) string {
	// The name is everything before the first comma; options such as
	// "omitempty" follow it and are irrelevant here. strings.Cut does
	// the single split without allocating a slice (the previous
	// Split-based version also carried a dead len(parts) > 0 check:
	// strings.Split never returns an empty slice).
	name, _, _ := strings.Cut(jsonTag, ",")
	if name == "" || name == "-" {
		return ""
	}
	return name
}
|
||||||
|
|
||||||
|
// extractPackageName returns the last path element of an import path,
// e.g. "github.com/jetkvm/kvm" -> "kvm". An empty path yields "".
func extractPackageName(pkgPath string) string {
	// Slice past the final '/'. LastIndex is -1 when there is no
	// slash, so unqualified names (and "") pass through unchanged.
	// The previous Split-based version allocated a slice and carried
	// a dead len(parts) > 0 guard (strings.Split never returns an
	// empty slice).
	return pkgPath[strings.LastIndex(pkgPath, "/")+1:]
}
|
||||||
|
|
||||||
|
// GetStructTypes returns all struct types from the schema
|
||||||
|
func (s *APISchema) GetStructTypes() []APIType {
|
||||||
|
structs := make([]APIType, 0)
|
||||||
|
for _, apiType := range s.Types {
|
||||||
|
if apiType.Kind == TypeKindStruct {
|
||||||
|
structs = append(structs, apiType)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return structs
|
||||||
|
}
|
||||||
|
|
||||||
|
// getSortedHandlers returns handlers sorted by name
|
||||||
|
func getSortedHandlers(schema *APISchema) []APIHandler {
|
||||||
|
var handlers []APIHandler
|
||||||
|
for _, handler := range schema.Handlers {
|
||||||
|
handlers = append(handlers, handler)
|
||||||
|
}
|
||||||
|
sort.Slice(handlers, func(i, j int) bool {
|
||||||
|
return handlers[i].Name < handlers[j].Name
|
||||||
|
})
|
||||||
|
return handlers
|
||||||
|
}
|
||||||
|
|
||||||
|
// getSortedMethods returns method names sorted alphabetically
|
||||||
|
func getSortedMethods(schema *APISchema) []string {
|
||||||
|
var methods []string
|
||||||
|
for name := range schema.Handlers {
|
||||||
|
methods = append(methods, name)
|
||||||
|
}
|
||||||
|
sort.Strings(methods)
|
||||||
|
return methods
|
||||||
|
}
|
||||||
|
|
||||||
|
// getAllReferencedStructs recursively finds all structs referenced in the API.
// It seeds a working set with every struct/extension already in the
// schema, then iterates to a fixed point: each pass adds any struct
// referenced by a field of a struct already in the set, until a full
// pass adds nothing. The result order is unspecified (map iteration).
func getAllReferencedStructs(schema *APISchema) []APIType {
	// Start with all structs found in handlers
	allStructs := make(map[string]APIType)

	// Add all structs from handlers
	for _, apiType := range schema.GetStructTypes() {
		allStructs[apiType.Name] = apiType
	}

	// Also add all structs from the complete schema that might be referenced
	for name, apiType := range schema.Types {
		if apiType.Kind == TypeKindStruct || apiType.Kind == TypeKindExtension {
			allStructs[name] = apiType
		}
	}

	// Recursively find all referenced structs (fixed-point iteration;
	// terminates because each pass can only add finitely many entries
	// and never removes any).
	changed := true
	for changed {
		changed = false
		for _, apiType := range allStructs {
			referencedStructs := findReferencedStructs(apiType, schema)
			for _, refStruct := range referencedStructs {
				if _, exists := allStructs[refStruct.Name]; !exists {
					allStructs[refStruct.Name] = refStruct
					changed = true
				}
			}
		}
	}

	// Convert map to slice
	var result []APIType
	for _, apiType := range allStructs {
		result = append(result, apiType)
	}

	return result
}
|
||||||
|
|
||||||
|
// findReferencedStructs finds structs referenced in a given API type
|
||||||
|
func findReferencedStructs(apiType APIType, schema *APISchema) []APIType {
|
||||||
|
var referenced []APIType
|
||||||
|
|
||||||
|
for _, field := range apiType.Fields {
|
||||||
|
if isStructType(field.Type) {
|
||||||
|
structName := extractStructName(field.Type)
|
||||||
|
if structType, exists := schema.Types[structName]; exists {
|
||||||
|
referenced = append(referenced, structType)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return referenced
|
||||||
|
}
|
||||||
|
|
||||||
|
// isStructType checks if a type string represents a struct
|
||||||
|
func isStructType(typeStr string) bool {
|
||||||
|
// Check if it's a custom type (not basic Go types)
|
||||||
|
return !isBasicType(typeStr) &&
|
||||||
|
!strings.HasPrefix(typeStr, "[]") &&
|
||||||
|
!strings.HasPrefix(typeStr, "map[") &&
|
||||||
|
!strings.HasPrefix(typeStr, "*") &&
|
||||||
|
!strings.HasPrefix(typeStr, "[")
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractStructName strips a leading slice ("[]") or pointer ("*")
// marker from a type string, returning the bare type name. Strings
// with neither marker pass through unchanged.
func extractStructName(typeStr string) string {
	// CutPrefix/TrimPrefix replace the manual index slicing of the
	// original; behavior is identical for all inputs.
	if bare, ok := strings.CutPrefix(typeStr, "[]"); ok {
		return bare
	}
	return strings.TrimPrefix(typeStr, "*")
}
|
||||||
|
|
||||||
|
// isBasicType reports whether typeName is one of Go's predeclared
// scalar types (or error), i.e. a type with no schema structure.
func isBasicType(typeName string) bool {
	// A switch avoids rebuilding (and heap-allocating) the lookup map
	// on every call, which the original did.
	switch typeName {
	case "bool", "string",
		"int", "int8", "int16", "int32", "int64",
		"uint", "uint8", "uint16", "uint32", "uint64", "uintptr",
		"float32", "float64",
		"complex64", "complex128",
		"byte", "rune", "error":
		return true
	}
	return false
}
|
||||||
|
|
@ -0,0 +1,84 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
// TypeKind represents the kind of API type
type TypeKind string

const (
	// TypeKindStruct represents a struct type
	TypeKindStruct TypeKind = "struct"
	// TypeKindInterface represents an interface type
	TypeKindInterface TypeKind = "interface"
	// TypeKindBasic represents a basic Go type
	TypeKindBasic TypeKind = "basic"
	// TypeKindMap represents a map type
	TypeKindMap TypeKind = "map"
	// TypeKindSlice represents a slice type
	TypeKindSlice TypeKind = "slice"
	// TypeKindArray represents an array type
	TypeKindArray TypeKind = "array"
	// TypeKindPointer represents a pointer type
	TypeKindPointer TypeKind = "pointer"
	// TypeKindExtension represents a struct that extends another struct
	// (a struct whose only exported member is one embedded struct).
	TypeKindExtension TypeKind = "extension"
)

// APIType represents a type used in the JSON-RPC API.
// Name carries the full Go spelling (including "*"/"[]" markers added
// by the extractors); the boolean flags and the *Type fields describe
// the wrapping shape for the TypeScript generator.
type APIType struct {
	Name         string     `json:"name"`
	Package      string     `json:"package"`
	Kind         TypeKind   `json:"kind"`
	Fields       []APIField `json:"fields,omitempty"`
	Extends      string     `json:"extends,omitempty"`
	IsPointer    bool       `json:"is_pointer"`
	IsSlice      bool       `json:"is_slice"`
	IsMap        bool       `json:"is_map"`
	MapKeyType   string     `json:"map_key_type,omitempty"`
	MapValueType string     `json:"map_value_type,omitempty"`
	SliceType    string     `json:"slice_type,omitempty"`
}

// APIField represents a field in a struct.
// JSONName is the wire name (json tag when present, Go name otherwise).
type APIField struct {
	Name       string `json:"name"`
	JSONName   string `json:"json_name"`
	Type       string `json:"type"`
	IsExported bool   `json:"is_exported"`
	Tag        string `json:"tag"`
}

// APIParameter represents a parameter in a JSON-RPC handler
type APIParameter struct {
	Name    string   `json:"name"`
	Type    string   `json:"type"`
	APIType *APIType `json:"api_type,omitempty"`
}

// APIReturnValue represents a return value from a JSON-RPC handler
type APIReturnValue struct {
	Index   int      `json:"index"`
	Type    string   `json:"type"`
	APIType *APIType `json:"api_type,omitempty"`
}

// APIHandler represents a complete JSON-RPC handler
type APIHandler struct {
	Name           string           `json:"name"`
	FunctionType   string           `json:"function_type"`
	Parameters     []APIParameter   `json:"parameters"`
	ReturnValues   []APIReturnValue `json:"return_values"`
	ParameterNames []string         `json:"parameter_names"`
}

// APISchema represents the complete JSON-RPC API schema
type APISchema struct {
	Handlers     map[string]APIHandler `json:"handlers"`
	Types        map[string]APIType    `json:"types"`
	TypeCount    int                   `json:"type_count"`
	HandlerCount int                   `json:"handler_count"`
}

// StringAliasInfo represents information about a string alias and its constants
type StringAliasInfo struct {
	Name      string
	Constants []string
}
|
||||||
|
|
@ -0,0 +1,319 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
"text/template"
|
||||||
|
|
||||||
|
"github.com/rs/zerolog/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
// generateTypeScriptTypings generates complete TypeScript definitions
// by executing typescriptTemplate against the schema. searchPath is
// forwarded to the string-alias scanner. Template failures are fatal:
// the generator has no useful partial output.
func generateTypeScriptTypings(schema *APISchema, searchPath string) string {
	// Create template functions; closures capture schema/searchPath so
	// the template can call them without arguments.
	funcMap := template.FuncMap{
		"cleanTypeName":      cleanTypeName,
		"goToTypeScriptType": goToTypeScriptType,
		"getAllStructs":      func() []APIType { return getAllReferencedStructs(schema) },
		"getSortedHandlers":  func() []APIHandler { return getSortedHandlers(schema) },
		"getSortedMethods":   func() []string { return getSortedMethods(schema) },
		"getReturnType":      getReturnType,
		"getParameterList":   getParameterList,
		"hasParameters":      hasParameters,
		"getStringAliasInfo": func() []StringAliasInfo { return getStringAliasInfoWithReflection(searchPath) },
		"sub":                func(a, b int) int { return a - b },
		"pad":                func(s string, width int) string { return padString(s, width) },
		"padComment":         func(fieldName, fieldType string) string { return padComment(fieldName, fieldType) },
		"isOptionalType":     func(goType string) bool { return isOptionalType(goType) },
	}

	// Parse the main template
	tmpl, err := template.New("typescript").Funcs(funcMap).Parse(typescriptTemplate)
	if err != nil {
		log.Fatal().Err(err).Msg("Failed to parse TypeScript template")
	}

	// Execute template
	var output strings.Builder
	err = tmpl.Execute(&output, schema)
	if err != nil {
		log.Fatal().Err(err).Msg("Failed to execute TypeScript template")
	}

	return output.String()
}
|
||||||
|
|
||||||
|
// padString right-pads s with spaces to at least width bytes; strings
// already at or beyond width are returned unchanged.
func padString(s string, width int) string {
	pad := width - len(s)
	if pad <= 0 {
		return s
	}
	return s + strings.Repeat(" ", pad)
}
|
||||||
|
|
||||||
|
// padComment returns the spaces needed to start a trailing comment at
// column 40, given a field rendered as "  <name>: <type>;".
func padComment(fieldName, fieldType string) string {
	const targetColumn = 40

	// Width of the rendered declaration: two-space indent + name +
	// ": " separator + type + trailing semicolon.
	used := 2 + len(fieldName) + 2 + len(fieldType) + 1
	if used >= targetColumn {
		// Already at or past the target; keep a single separator space.
		return " "
	}
	return strings.Repeat(" ", targetColumn-used)
}
|
||||||
|
|
||||||
|
// isOptionalType reports whether a Go type is rendered as an optional
// ("?") TypeScript property; the null.* wrappers encode absence.
func isOptionalType(goType string) bool {
	switch goType {
	case "null.String", "null.Bool", "null.Int":
		return true
	default:
		return false
	}
}
|
||||||
|
|
||||||
|
// cleanTypeName strips any package qualifier from a Go type name,
// e.g. "kvm.Config" -> "Config". Unqualified names pass through.
func cleanTypeName(typeName string) string {
	// Slice past the final '.'; LastIndex is -1 when absent, so the
	// whole string is returned. This replaces the Contains+Split pair,
	// which allocated a throwaway slice, with identical results.
	return typeName[strings.LastIndex(typeName, ".")+1:]
}
|
||||||
|
|
||||||
|
// goToTypeScriptType converts Go types to TypeScript types with recursive parsing.
// Thin public entry point over parseTypeRecursively, kept so the
// template funcMap exposes a stable name.
func goToTypeScriptType(goType string) string {
	return parseTypeRecursively(goType)
}
|
||||||
|
|
||||||
|
// parseTypeRecursively parses Go types recursively to handle complex nested types.
// Resolution order matters: exact-name matches first (basic types and
// known special cases), then structural wrappers (*T, []T, map[K]V),
// then a textual interface{} fallback, then custom names.
func parseTypeRecursively(goType string) string {
	// Remove any leading/trailing whitespace
	goType = strings.TrimSpace(goType)

	// Handle basic types first
	switch goType {
	case "bool":
		return "boolean"
	case "string":
		return "string"
	case "int", "int8", "int16", "int32", "int64":
		return "number"
	case "uint", "uint8", "uint16", "uint32", "uint64":
		return "number"
	case "float32", "float64":
		return "number"
	case "byte":
		return "number"
	case "rune":
		return "string"
	case "error":
		return "string"
	case "usbgadget.ByteSlice":
		return "number[]"
	case "null.String":
		return "string"
	case "null.Bool":
		return "boolean"
	case "null.Int":
		return "number"
	case "interface {}":
		return "any"
	case "time.Duration":
		return "number"
	case "time.Time":
		return "string"
	case "net.IP":
		return "string"
	case "net.IPNet":
		return "string"
	case "net.HardwareAddr":
		return "string"
	}

	// Handle pointer types: a Go pointer becomes "T | null".
	if strings.HasPrefix(goType, "*") {
		innerType := parseTypeRecursively(goType[1:])
		return innerType + " | null"
	}

	// Handle slice types
	if strings.HasPrefix(goType, "[]") {
		elementType := parseTypeRecursively(goType[2:])
		return elementType + "[]"
	}

	// Handle map types with proper bracket matching
	if strings.HasPrefix(goType, "map[") {
		return parseMapType(goType)
	}

	// Handle any remaining interface {} in complex types
	if strings.Contains(goType, "interface {}") {
		return strings.ReplaceAll(goType, "interface {}", "any")
	}

	// Check if this is a string alias (type name != underlying type)
	if isStringAlias(goType) {
		return cleanTypeName(goType)
	}

	// Return cleaned custom type name
	// (same result as the alias branch above; kept separate to
	// document intent).
	return cleanTypeName(goType)
}
|
||||||
|
|
||||||
|
// parseMapType parses map types with proper bracket matching and
// renders them as TypeScript Record<K, V>. Bracket matching is needed
// because the key may itself contain brackets (e.g. "map[[2]int]string").
// Malformed input is returned unchanged.
func parseMapType(goType string) string {
	if !strings.HasPrefix(goType, "map[") {
		return goType
	}

	// Find the key type and value type
	start := 4 // After "map["
	bracketCount := 0
	keyEnd := -1

	// Find the end of the key type by looking for the first ']' at bracket level 0
	for i := start; i < len(goType); i++ {
		char := goType[i]
		if char == '[' {
			bracketCount++
		} else if char == ']' {
			if bracketCount == 0 {
				keyEnd = i
				break
			}
			bracketCount--
		}
	}

	// No closing ']' found, or nothing follows it (no value type).
	if keyEnd == -1 || keyEnd >= len(goType)-1 {
		return goType // Invalid map type
	}

	keyType := goType[start:keyEnd]
	valueType := goType[keyEnd+1:]

	// Parse key and value types recursively
	tsKeyType := parseTypeRecursively(keyType)
	tsValueType := parseTypeRecursively(valueType)

	return fmt.Sprintf("Record<%s, %s>", tsKeyType, tsValueType)
}
|
||||||
|
|
||||||
|
// isStringAlias reports whether typeName is one of the string-based
// alias types known to this codebase (rendered as TS union types).
func isStringAlias(typeName string) bool {
	switch typeName {
	case "VirtualMediaMode", "VirtualMediaSource":
		return true
	}
	return false
}
|
||||||
|
|
||||||
|
// getReturnType returns the TypeScript return type for a handler
|
||||||
|
func getReturnType(handler APIHandler) string {
|
||||||
|
if len(handler.ReturnValues) == 0 {
|
||||||
|
return "void"
|
||||||
|
} else if len(handler.ReturnValues) == 1 {
|
||||||
|
return goToTypeScriptType(handler.ReturnValues[0].Type)
|
||||||
|
} else {
|
||||||
|
// Multiple return values - use tuple type
|
||||||
|
var returnTypes []string
|
||||||
|
for _, retVal := range handler.ReturnValues {
|
||||||
|
returnTypes = append(returnTypes, goToTypeScriptType(retVal.Type))
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("[%s]", strings.Join(returnTypes, ", "))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// getParameterList returns the TypeScript parameter list for a handler
|
||||||
|
func getParameterList(handler APIHandler) string {
|
||||||
|
if len(handler.Parameters) == 0 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
var paramList []string
|
||||||
|
for _, param := range handler.Parameters {
|
||||||
|
tsType := goToTypeScriptType(param.Type)
|
||||||
|
paramList = append(paramList, fmt.Sprintf("%s: %s", param.Name, tsType))
|
||||||
|
}
|
||||||
|
return strings.Join(paramList, ", ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasParameters returns true if the handler has parameters
|
||||||
|
func hasParameters(handler APIHandler) bool {
|
||||||
|
return len(handler.Parameters) > 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// typescriptTemplate is the main template for generating TypeScript definitions.
// The template body is runtime data: every byte is emitted verbatim
// (template actions aside), so whitespace here controls the formatting
// of the generated .d.ts/client file.
const typescriptTemplate = `// Code generated by generate_typings.go. DO NOT EDIT.
{{range $struct := getAllStructs}}
{{if eq $struct.Kind "extension"}}
export interface {{cleanTypeName $struct.Name}} extends {{cleanTypeName $struct.Extends}} {
}
{{else}}
export interface {{cleanTypeName $struct.Name}} {
{{range $field := $struct.Fields}}  {{$field.JSONName}}{{if isOptionalType $field.Type}}?{{end}}: {{goToTypeScriptType $field.Type}};{{padComment $field.JSONName (goToTypeScriptType $field.Type)}}// {{$field.Name}} {{$field.Type}}
{{end}}}
{{end}}
{{end}}
// String aliases with constants
{{range $alias := getStringAliasInfo}}
export type {{$alias.Name}} = {{range $i, $const := $alias.Constants}}"{{$const}}"{{if lt $i (sub (len $alias.Constants) 1)}} | {{end}}{{end}};
{{end}}

// JSON-RPC Types
export interface JsonRpcRequest {
  jsonrpc: "2.0";
  method: string;
  params?: unknown;
  id: number | string;
}

export interface JsonRpcError {
  code: number;
  data?: string;
  message: string;
}

export interface JsonRpcSuccessResponse {
  jsonrpc: "2.0";
  result: unknown;
  id: number | string;
}

export interface JsonRpcErrorResponse {
  jsonrpc: "2.0";
  error: JsonRpcError;
  id: number | string;
}

export type JsonRpcResponse = JsonRpcSuccessResponse | JsonRpcErrorResponse;

// RPC Functions
export class JsonRpcClient {
  constructor(private send: (method: string, params: unknown, callback?: (resp: JsonRpcResponse) => void) => void) {}

  private async sendAsync<T>(method: string, params?: unknown): Promise<T> {
    return new Promise<T>((resolve, reject) => {
      this.send(method, params, (response: JsonRpcResponse) => {
        if ('error' in response) {
          reject(new Error('RPC error: ' + response.error.message));
          return;
        }
        resolve(response.result as T);
      });
    });
  }

{{range $handler := getSortedHandlers}}
  async {{$handler.Name}}({{getParameterList $handler}}) {
{{if eq (len $handler.Parameters) 0}}    return this.sendAsync<{{getReturnType $handler}}>('{{$handler.Name}}');
{{else}}    return this.sendAsync<{{getReturnType $handler}}>('{{$handler.Name}}', {
{{range $param := $handler.Parameters}}      {{$param.Name}},
{{end}}    });
{{end}}  }

{{end}}}
`
|
||||||
|
|
@ -0,0 +1,77 @@
|
||||||
|
#!/usr/bin/env bash
#
# Update resource/netboot.xyz-multiarch.iso to the latest upstream
# release, verifying the published SHA256 digest before committing.
#
# Exit immediately if a command exits with a non-zero status
set -e

C_RST="$(tput sgr0)"
C_ERR="$(tput setaf 1)"
C_OK="$(tput setaf 2)"
C_WARN="$(tput setaf 3)"
C_INFO="$(tput setaf 5)"

# msg TEXT COLOR — all expansions are quoted: unquoted $2/$C_RST were
# word-split, and an empty color code shifted printf's arguments.
msg() { printf '%s%s%s\n' "$2" "$1" "$C_RST"; }

msg_info() { msg "$1" "$C_INFO"; }
msg_ok() { msg "$1" "$C_OK"; }
msg_err() { msg "$1" "$C_ERR"; }
msg_warn() { msg "$1" "$C_WARN"; }

# Get the latest release information (newest non-draft, non-prerelease
# release that actually has assets attached)
msg_info "Getting latest release information ..."
LATEST_RELEASE=$(curl -s \
    -H "Accept: application/vnd.github+json" \
    -H "X-GitHub-Api-Version: 2022-11-28" \
    https://api.github.com/repos/netbootxyz/netboot.xyz/releases | jq '
    [.[] | select(.prerelease == false and .draft == false and .assets != null and (.assets | length > 0))] |
    sort_by(.created_at) |
    .[-1]')

# Extract version, download URL, and digest
VERSION=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
ISO_URL=$(echo "$LATEST_RELEASE" | jq -r '.assets[] | select(.name == "netboot.xyz-multiarch.iso") | .browser_download_url')
EXPECTED_CHECKSUM=$(echo "$LATEST_RELEASE" | jq -r '.assets[] | select(.name == "netboot.xyz-multiarch.iso") | .digest' | sed 's/sha256://')

msg_ok "Latest version: $VERSION"
msg_ok "ISO URL: $ISO_URL"
msg_ok "Expected SHA256: $EXPECTED_CHECKSUM"


# Check if we already have the same version
if [ -f "resource/netboot.xyz-multiarch.iso" ]; then
    msg_info "Checking current resource file ..."

    # First check by checksum (fastest)
    CURRENT_CHECKSUM=$(shasum -a 256 resource/netboot.xyz-multiarch.iso | awk '{print $1}')

    if [ "$CURRENT_CHECKSUM" = "$EXPECTED_CHECKSUM" ]; then
        msg_ok "Resource file is already up to date (version $VERSION). No update needed."
        exit 0
    else
        msg_info "Checksums differ, proceeding with download ..."
    fi
fi

# Download ISO file.
# Use an explicit X-template so this works with both GNU and BSD mktemp:
# GNU `mktemp -t NAME` rejects templates without trailing X's.
TMP_ISO=$(mktemp "${TMPDIR:-/tmp}/netbootxyziso.XXXXXX")
msg_info "Downloading ISO file ..."
curl -L -o "$TMP_ISO" "$ISO_URL"

# Verify SHA256 checksum
msg_info "Verifying SHA256 checksum ..."
ACTUAL_CHECKSUM=$(shasum -a 256 "$TMP_ISO" | awk '{print $1}')

if [ "$EXPECTED_CHECKSUM" = "$ACTUAL_CHECKSUM" ]; then
    msg_ok "Verified SHA256 checksum."
    mv -f "$TMP_ISO" "resource/netboot.xyz-multiarch.iso"
    msg_ok "Updated ISO file."
    git add "resource/netboot.xyz-multiarch.iso"
    git commit -m "chore: update netboot.xyz-multiarch.iso to $VERSION"
    msg_ok "Committed changes."
    msg_ok "You can now push the changes to the remote repository."
    exit 0
else
    msg_err "Inconsistent SHA256 checksum."
    msg_err "Expected: $EXPECTED_CHECKSUM"
    msg_err "Actual: $ACTUAL_CHECKSUM"
    exit 1
fi
|
||||||
|
|
@ -6,27 +6,34 @@
|
||||||
<!-- These are the fonts used in the app -->
|
<!-- These are the fonts used in the app -->
|
||||||
<link
|
<link
|
||||||
rel="preload"
|
rel="preload"
|
||||||
href="/fonts/CircularXXWeb-Medium.woff2"
|
href="./public/fonts/CircularXXWeb-Medium.woff2"
|
||||||
as="font"
|
as="font"
|
||||||
type="font/woff2"
|
type="font/woff2"
|
||||||
crossorigin
|
crossorigin
|
||||||
/>
|
/>
|
||||||
<link
|
<link
|
||||||
rel="preload"
|
rel="preload"
|
||||||
href="/fonts/CircularXXWeb-Book.woff2"
|
href="./public/fonts/CircularXXWeb-Book.woff2"
|
||||||
as="font"
|
as="font"
|
||||||
type="font/woff2"
|
type="font/woff2"
|
||||||
crossorigin
|
crossorigin
|
||||||
/>
|
/>
|
||||||
<link
|
<link
|
||||||
rel="preload"
|
rel="preload"
|
||||||
href="/fonts/CircularXXWeb-Regular.woff2"
|
href="./public/fonts/CircularXXWeb-Regular.woff2"
|
||||||
|
as="font"
|
||||||
|
type="font/woff2"
|
||||||
|
crossorigin
|
||||||
|
/>
|
||||||
|
<link
|
||||||
|
rel="preload"
|
||||||
|
href="./public/fonts/CircularXXWeb-Black.woff2"
|
||||||
as="font"
|
as="font"
|
||||||
type="font/woff2"
|
type="font/woff2"
|
||||||
crossorigin
|
crossorigin
|
||||||
/>
|
/>
|
||||||
<title>JetKVM</title>
|
<title>JetKVM</title>
|
||||||
<link rel="stylesheet" href="/fonts/fonts.css" />
|
<link rel="stylesheet" href="./public/fonts/fonts.css" />
|
||||||
<link rel="icon" type="image/png" href="/favicon-96x96.png" sizes="96x96" />
|
<link rel="icon" type="image/png" href="/favicon-96x96.png" sizes="96x96" />
|
||||||
<link rel="icon" type="image/svg+xml" href="/favicon.svg" />
|
<link rel="icon" type="image/svg+xml" href="/favicon.svg" />
|
||||||
<link rel="shortcut icon" href="/favicon.ico" />
|
<link rel="shortcut icon" href="/favicon.ico" />
|
||||||
|
|
@ -36,23 +43,21 @@
|
||||||
<meta name="theme-color" content="#051946" />
|
<meta name="theme-color" content="#051946" />
|
||||||
<meta name="description" content="A web-based KVM console for managing remote servers." />
|
<meta name="description" content="A web-based KVM console for managing remote servers." />
|
||||||
<script>
|
<script>
|
||||||
// Initial theme setup
|
function applyThemeFromPreference() {
|
||||||
document.documentElement.classList.toggle(
|
// dark theme setup
|
||||||
"dark",
|
var darkDesired = localStorage.theme === "dark" ||
|
||||||
localStorage.theme === "dark" ||
|
|
||||||
(!("theme" in localStorage) &&
|
(!("theme" in localStorage) &&
|
||||||
window.matchMedia("(prefers-color-scheme: dark)").matches),
|
window.matchMedia("(prefers-color-scheme: dark)").matches)
|
||||||
);
|
|
||||||
|
document.documentElement.classList.toggle("dark", darkDesired)
|
||||||
|
}
|
||||||
|
|
||||||
|
// initial theme application
|
||||||
|
applyThemeFromPreference();
|
||||||
|
|
||||||
// Listen for system theme changes
|
// Listen for system theme changes
|
||||||
window
|
window.matchMedia("(prefers-color-scheme: dark)").addEventListener("change", applyThemeFromPreference);
|
||||||
.matchMedia("(prefers-color-scheme: dark)")
|
window.matchMedia("(prefers-color-scheme: light)").addEventListener("change", applyThemeFromPreference);
|
||||||
.addEventListener("change", ({ matches }) => {
|
|
||||||
if (!("theme" in localStorage)) {
|
|
||||||
// Only auto-switch if user hasn't manually set a theme
|
|
||||||
document.documentElement.classList.toggle("dark", matches);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
</script>
|
</script>
|
||||||
</head>
|
</head>
|
||||||
<body
|
<body
|
||||||
|
|
|
||||||
|
|
@ -1,2 +0,0 @@
|
||||||
User-agent: *
|
|
||||||
Disallow: /
|
|
||||||
|
|
@ -31,6 +31,7 @@ export default defineConfig(({ mode, command }) => {
|
||||||
esbuild: {
|
esbuild: {
|
||||||
pure: ["console.debug"],
|
pure: ["console.debug"],
|
||||||
},
|
},
|
||||||
|
assetsInclude: ["**/*.woff2"],
|
||||||
build: {
|
build: {
|
||||||
outDir: isCloud ? "dist" : "../static",
|
outDir: isCloud ? "dist" : "../static",
|
||||||
rollupOptions: {
|
rollupOptions: {
|
||||||
|
|
|
||||||
16
web.go
16
web.go
|
|
@ -69,8 +69,7 @@ type SetupRequest struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
var cachableFileExtensions = []string{
|
var cachableFileExtensions = []string{
|
||||||
".jpg", ".jpeg", ".png", ".gif", ".webp", ".woff2",
|
".jpg", ".jpeg", ".png", ".svg", ".gif", ".webp", ".ico", ".woff2",
|
||||||
".ico",
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func setupRouter() *gin.Engine {
|
func setupRouter() *gin.Engine {
|
||||||
|
|
@ -83,7 +82,10 @@ func setupRouter() *gin.Engine {
|
||||||
}),
|
}),
|
||||||
))
|
))
|
||||||
|
|
||||||
staticFS, _ := fs.Sub(staticFiles, "static")
|
staticFS, err := fs.Sub(staticFiles, "static")
|
||||||
|
if err != nil {
|
||||||
|
logger.Fatal().Err(err).Msg("failed to get rooted static files subdirectory")
|
||||||
|
}
|
||||||
staticFileServer := http.StripPrefix("/static", statigz.FileServer(
|
staticFileServer := http.StripPrefix("/static", statigz.FileServer(
|
||||||
staticFS.(fs.ReadDirFS),
|
staticFS.(fs.ReadDirFS),
|
||||||
))
|
))
|
||||||
|
|
@ -109,9 +111,17 @@ func setupRouter() *gin.Engine {
|
||||||
c.Next()
|
c.Next()
|
||||||
})
|
})
|
||||||
|
|
||||||
|
r.GET("/robots.txt", func(c *gin.Context) {
|
||||||
|
c.Header("Content-Type", "text/plain")
|
||||||
|
c.Header("Cache-Control", "public, max-age=31536000, immutable") // Cache for 1 year
|
||||||
|
c.String(http.StatusOK, "User-agent: *\nDisallow: /")
|
||||||
|
})
|
||||||
|
|
||||||
r.Any("/static/*w", func(c *gin.Context) {
|
r.Any("/static/*w", func(c *gin.Context) {
|
||||||
staticFileServer.ServeHTTP(c.Writer, c.Request)
|
staticFileServer.ServeHTTP(c.Writer, c.Request)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// Public routes (no authentication required)
|
||||||
r.POST("/auth/login-local", handleLogin)
|
r.POST("/auth/login-local", handleLogin)
|
||||||
|
|
||||||
// We use this to determine if the device is setup
|
// We use this to determine if the device is setup
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue