Compare commits
107 commits
| Author | SHA1 | Date | |
|---|---|---|---|
| b614c97f42 | |||
| 2f87f0fd98 | |||
| d71f00357e | |||
| d6e71cde12 | |||
| bc0b98c7f9 | |||
| 84b1c67c72 | |||
| cb135b7caa | |||
| c8f91e8d02 | |||
| 16b07df33e | |||
| 3923d20ae9 | |||
| 940c3d694d | |||
| 80d252738f | |||
| 9914f51140 | |||
| f5fb9ec96b | |||
| 5bcd3581dd | |||
| e5b6dcd187 | |||
| eaaafbc040 | |||
| 637c7e0b56 | |||
| 618842b285 | |||
| a382e8dc79 | |||
| 579e6a1eee | |||
| f9ff2c0d62 | |||
| 2ca32769d5 | |||
| 4e78cefd56 | |||
|
|
629358a81b | ||
|
|
f7e45137df | ||
| f3b18c6b08 | |||
| 32459b420b | |||
| 4bd45e7d3c | |||
| df70be2c4f | |||
| d992074c9c | |||
| 6ad6876051 | |||
| b4c89e7d90 | |||
| 4d61a6913c | |||
| ed1a562c2c | |||
| bc9d95e8d5 | |||
| abf0c86ab0 | |||
| 4afc0fd8ce | |||
| c9b448d508 | |||
| db607ac696 | |||
| 6bba227654 | |||
| cc19ee5c0f | |||
| e4a5e7f715 | |||
| 3f6ddfbcd4 | |||
| 55b2e7f6cb | |||
| b6248e409f | |||
| 11276fcf25 | |||
| 0c5b73154d | |||
| ce7336324f | |||
| 15c1d6733c | |||
| 4e5064d07a | |||
| b89db8dc48 | |||
| 59cb09a8e7 | |||
| 9130c09e56 | |||
| cf7dbf5bee | |||
| ac847f34ca | |||
| 6e1af5ba28 | |||
| ad98a1e6c4 | |||
| e04bcfb306 | |||
| 3c2a0129c0 | |||
| a4d2e919dc | |||
| 8ea865b2ce | |||
| d3b4bef531 | |||
| bb57b23224 | |||
| 572e27589b | |||
| d0926c2c1d | |||
| 5145816b0a | |||
| 1f4732823d | |||
| 24853bfce8 | |||
| 07330be10b | |||
| d63c1fd77b | |||
| 9bdc9c30c6 | |||
| ba93931c33 | |||
| 0114b01fb3 | |||
| fb75758986 | |||
| b52a5bfdb7 | |||
| 6ccd801c89 | |||
| 20b65e280d | |||
| 52c108f7d3 | |||
| 2c9ecd7caf | |||
| 08025903ad | |||
| cb441b112c | |||
| 84c55311d1 | |||
| 32e7a79c71 | |||
| ddb75f1aeb | |||
| 7b1356f5af | |||
| db4574b887 | |||
| 4509611185 | |||
| 34970ac9d9 | |||
| 4dff61074d | |||
| 91d69db475 | |||
| 2cc5a3270b | |||
| 07734c6ee4 | |||
| 0bed650685 | |||
| 2d86b3ad21 | |||
| 0c7767dc41 | |||
| 26871decd0 | |||
| c5aeb16e16 | |||
| c2d1ec0842 | |||
| 3d5a3f2aa1 | |||
| 9467675715 | |||
| ef597c5f22 | |||
| 92c30d4cad | |||
| 5a1c556593 | |||
| ada4c6846d | |||
| d0a55afcbf | |||
| 41ffa93412 |
196 changed files with 9136 additions and 2792 deletions
1
.gitignore
vendored
1
.gitignore
vendored
|
|
@ -1,3 +1,4 @@
|
||||||
.cache
|
.cache
|
||||||
.gocache
|
.gocache
|
||||||
.build
|
.build
|
||||||
|
.worktrees
|
||||||
|
|
|
||||||
48
cmd/commit_message.go
Normal file
48
cmd/commit_message.go
Normal file
|
|
@ -0,0 +1,48 @@
|
||||||
|
/*
|
||||||
|
Copyright © 2025 Lewis Wynne <lew@ily.rs>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"text/template"
|
||||||
|
)
|
||||||
|
|
||||||
|
// renderCommitMessage renders a commit message template. It extends the
|
||||||
|
// shared template FuncMap with {{ summary }}, which returns the action
|
||||||
|
// description for the current commit. On any error the raw template string
|
||||||
|
// is returned so that commits are never blocked by a bad template.
|
||||||
|
func renderCommitMessage(tmpl string, summary string) string {
|
||||||
|
funcMap := templateFuncMap()
|
||||||
|
funcMap["summary"] = func() string { return summary }
|
||||||
|
|
||||||
|
t, err := template.New("commit").Option("missingkey=zero").Funcs(funcMap).Parse(tmpl)
|
||||||
|
if err != nil {
|
||||||
|
return tmpl
|
||||||
|
}
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
if err := t.Execute(&buf, nil); err != nil {
|
||||||
|
return tmpl
|
||||||
|
}
|
||||||
|
return buf.String()
|
||||||
|
}
|
||||||
53
cmd/commit_message_test.go
Normal file
53
cmd/commit_message_test.go
Normal file
|
|
@ -0,0 +1,53 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestRenderCommitMessage(t *testing.T) {
|
||||||
|
t.Run("summary and time", func(t *testing.T) {
|
||||||
|
msg := renderCommitMessage("{{ summary }} {{ time }}", "set foo")
|
||||||
|
if !strings.HasPrefix(msg, "set foo ") {
|
||||||
|
t.Errorf("expected prefix 'set foo ', got %q", msg)
|
||||||
|
}
|
||||||
|
parts := strings.SplitN(msg, " ", 3)
|
||||||
|
if len(parts) < 3 || !strings.Contains(parts[2], "T") {
|
||||||
|
t.Errorf("expected RFC3339 time, got %q", msg)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("empty summary", func(t *testing.T) {
|
||||||
|
msg := renderCommitMessage("{{ summary }} {{ time }}", "")
|
||||||
|
if !strings.HasPrefix(msg, " ") {
|
||||||
|
t.Errorf("expected leading space (empty summary), got %q", msg)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("default function", func(t *testing.T) {
|
||||||
|
msg := renderCommitMessage(`{{ default "sync" (summary) }}`, "")
|
||||||
|
if msg != "sync" {
|
||||||
|
t.Errorf("expected 'sync', got %q", msg)
|
||||||
|
}
|
||||||
|
msg = renderCommitMessage(`{{ default "sync" (summary) }}`, "set foo")
|
||||||
|
if msg != "set foo" {
|
||||||
|
t.Errorf("expected 'set foo', got %q", msg)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("env function", func(t *testing.T) {
|
||||||
|
t.Setenv("PDA_TEST_USER", "alice")
|
||||||
|
msg := renderCommitMessage(`{{ env "PDA_TEST_USER" }}: {{ summary }}`, "set foo")
|
||||||
|
if msg != "alice: set foo" {
|
||||||
|
t.Errorf("expected 'alice: set foo', got %q", msg)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("bad template returns raw", func(t *testing.T) {
|
||||||
|
raw := "{{ bad template"
|
||||||
|
msg := renderCommitMessage(raw, "test")
|
||||||
|
if msg != raw {
|
||||||
|
t.Errorf("expected raw %q, got %q", raw, msg)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
83
cmd/completions.go
Normal file
83
cmd/completions.go
Normal file
|
|
@ -0,0 +1,83 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
// completeKeys returns key[@store] completions for the current toComplete prefix.
|
||||||
|
// It handles three cases:
|
||||||
|
// - No "@" typed yet: return all keys from all stores (as "key@store")
|
||||||
|
// - "@" typed with partial store: return store-scoped completions
|
||||||
|
// - "key@store" with known store: return keys from that store
|
||||||
|
func completeKeys(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
||||||
|
store := &Store{}
|
||||||
|
stores, err := store.AllStores()
|
||||||
|
if err != nil || len(stores) == 0 {
|
||||||
|
return nil, cobra.ShellCompDirectiveNoFileComp
|
||||||
|
}
|
||||||
|
|
||||||
|
var completions []string
|
||||||
|
parts := strings.SplitN(toComplete, "@", 2)
|
||||||
|
|
||||||
|
if len(parts) == 2 {
|
||||||
|
// User typed "something@" — complete keys within matching stores.
|
||||||
|
prefix := parts[0]
|
||||||
|
dbFilter := strings.ToLower(parts[1])
|
||||||
|
for _, db := range stores {
|
||||||
|
if !strings.HasPrefix(db, dbFilter) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
keys, err := store.Keys(db)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for _, k := range keys {
|
||||||
|
if prefix == "" || strings.HasPrefix(k, strings.ToLower(prefix)) {
|
||||||
|
completions = append(completions, k+"@"+db)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// No "@" yet — offer key@store for every key in every store.
|
||||||
|
lowerPrefix := strings.ToLower(toComplete)
|
||||||
|
for _, db := range stores {
|
||||||
|
keys, err := store.Keys(db)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for _, k := range keys {
|
||||||
|
full := k + "@" + db
|
||||||
|
if strings.HasPrefix(full, lowerPrefix) || strings.HasPrefix(k, lowerPrefix) {
|
||||||
|
completions = append(completions, full)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return completions, cobra.ShellCompDirectiveNoFileComp
|
||||||
|
}
|
||||||
|
|
||||||
|
// completeStores returns store name completions.
|
||||||
|
func completeStores(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
||||||
|
store := &Store{}
|
||||||
|
stores, err := store.AllStores()
|
||||||
|
if err != nil {
|
||||||
|
return nil, cobra.ShellCompDirectiveNoFileComp
|
||||||
|
}
|
||||||
|
|
||||||
|
var completions []string
|
||||||
|
lowerPrefix := strings.ToLower(toComplete)
|
||||||
|
for _, db := range stores {
|
||||||
|
if strings.HasPrefix(db, lowerPrefix) {
|
||||||
|
completions = append(completions, db)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return completions, cobra.ShellCompDirectiveNoFileComp
|
||||||
|
}
|
||||||
|
|
||||||
|
// completeStoreFlag is a completion function for --store / -s string slice flags.
|
||||||
|
func completeStoreFlag(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
||||||
|
return completeStores(cmd, args, toComplete)
|
||||||
|
}
|
||||||
146
cmd/config.go
146
cmd/config.go
|
|
@ -23,6 +23,7 @@ THE SOFTWARE.
|
||||||
package cmd
|
package cmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
|
@ -35,27 +36,41 @@ type Config struct {
|
||||||
DisplayAsciiArt bool `toml:"display_ascii_art"`
|
DisplayAsciiArt bool `toml:"display_ascii_art"`
|
||||||
Key KeyConfig `toml:"key"`
|
Key KeyConfig `toml:"key"`
|
||||||
Store StoreConfig `toml:"store"`
|
Store StoreConfig `toml:"store"`
|
||||||
|
List ListConfig `toml:"list"`
|
||||||
Git GitConfig `toml:"git"`
|
Git GitConfig `toml:"git"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type KeyConfig struct {
|
type KeyConfig struct {
|
||||||
AlwaysPromptDelete bool `toml:"always_prompt_delete"`
|
AlwaysPromptDelete bool `toml:"always_prompt_delete"`
|
||||||
AlwaysPromptOverwrite bool `toml:"always_prompt_overwrite"`
|
AlwaysPromptGlobDelete bool `toml:"always_prompt_glob_delete"`
|
||||||
|
AlwaysPromptOverwrite bool `toml:"always_prompt_overwrite"`
|
||||||
|
AlwaysEncrypt bool `toml:"always_encrypt"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type StoreConfig struct {
|
type StoreConfig struct {
|
||||||
DefaultStoreName string `toml:"default_store_name"`
|
DefaultStoreName string `toml:"default_store_name"`
|
||||||
AlwaysPromptDelete bool `toml:"always_prompt_delete"`
|
AlwaysPromptDelete bool `toml:"always_prompt_delete"`
|
||||||
|
AlwaysPromptOverwrite bool `toml:"always_prompt_overwrite"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ListConfig struct {
|
||||||
|
AlwaysShowAllStores bool `toml:"always_show_all_stores"`
|
||||||
|
DefaultListFormat string `toml:"default_list_format"`
|
||||||
|
AlwaysShowFullValues bool `toml:"always_show_full_values"`
|
||||||
|
AlwaysHideHeader bool `toml:"always_hide_header"`
|
||||||
|
DefaultColumns string `toml:"default_columns"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type GitConfig struct {
|
type GitConfig struct {
|
||||||
AutoFetch bool `toml:"auto_fetch"`
|
AutoFetch bool `toml:"auto_fetch"`
|
||||||
AutoCommit bool `toml:"auto_commit"`
|
AutoCommit bool `toml:"auto_commit"`
|
||||||
AutoPush bool `toml:"auto_push"`
|
AutoPush bool `toml:"auto_push"`
|
||||||
|
DefaultCommitMessage string `toml:"default_commit_message"`
|
||||||
}
|
}
|
||||||
|
|
||||||
var (
|
var (
|
||||||
config Config
|
config Config
|
||||||
|
configUndecodedKeys []string
|
||||||
asciiArt string = ` ▄▄
|
asciiArt string = ` ▄▄
|
||||||
██
|
██
|
||||||
██▄███▄ ▄███▄██ ▄█████▄
|
██▄███▄ ▄███▄██ ▄█████▄
|
||||||
|
|
@ -69,82 +84,119 @@ var (
|
||||||
)
|
)
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
config, configErr = loadConfig()
|
var migrations []migration
|
||||||
|
config, configUndecodedKeys, migrations, configErr = loadConfig()
|
||||||
|
for _, m := range migrations {
|
||||||
|
if m.Conflict {
|
||||||
|
warnf("both '%s' and '%s' present; using '%s'", m.Old, m.New, m.New)
|
||||||
|
} else {
|
||||||
|
warnf("config key '%s' is deprecated, use '%s'", m.Old, m.New)
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func defaultConfig() Config {
|
func defaultConfig() Config {
|
||||||
return Config{
|
return Config{
|
||||||
DisplayAsciiArt: true,
|
DisplayAsciiArt: true,
|
||||||
Key: KeyConfig{
|
Key: KeyConfig{
|
||||||
AlwaysPromptDelete: false,
|
AlwaysPromptDelete: false,
|
||||||
AlwaysPromptOverwrite: false,
|
AlwaysPromptGlobDelete: true,
|
||||||
|
AlwaysPromptOverwrite: false,
|
||||||
},
|
},
|
||||||
Store: StoreConfig{
|
Store: StoreConfig{
|
||||||
DefaultStoreName: "default",
|
DefaultStoreName: "store",
|
||||||
AlwaysPromptDelete: true,
|
AlwaysPromptDelete: true,
|
||||||
|
AlwaysPromptOverwrite: true,
|
||||||
|
},
|
||||||
|
List: ListConfig{
|
||||||
|
AlwaysShowAllStores: true,
|
||||||
|
DefaultListFormat: "table",
|
||||||
|
DefaultColumns: "meta,size,ttl,store,key,value",
|
||||||
},
|
},
|
||||||
Git: GitConfig{
|
Git: GitConfig{
|
||||||
AutoFetch: false,
|
AutoFetch: false,
|
||||||
AutoCommit: false,
|
AutoCommit: false,
|
||||||
AutoPush: false,
|
AutoPush: false,
|
||||||
|
DefaultCommitMessage: "{{ summary }} {{ time }}",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func loadConfig() (Config, error) {
|
// loadConfig returns (config, undecodedKeys, migrations, error).
|
||||||
|
// Migrations are returned but NOT printed — callers decide.
|
||||||
|
func loadConfig() (Config, []string, []migration, error) {
|
||||||
cfg := defaultConfig()
|
cfg := defaultConfig()
|
||||||
|
|
||||||
path, err := configPath()
|
path, err := configPath()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return cfg, err
|
return cfg, nil, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if _, err := os.Stat(path); err != nil {
|
data, err := os.ReadFile(path)
|
||||||
if os.IsNotExist(err) {
|
|
||||||
return cfg, nil
|
|
||||||
}
|
|
||||||
return cfg, err
|
|
||||||
}
|
|
||||||
|
|
||||||
md, err := toml.DecodeFile(path, &cfg)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return cfg, fmt.Errorf("parse %s: %w", path, err)
|
if os.IsNotExist(err) {
|
||||||
|
return cfg, nil, nil, nil
|
||||||
|
}
|
||||||
|
return cfg, nil, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if !md.IsDefined("display_ascii_art") {
|
// Decode into a raw map so we can run deprecation migrations before
|
||||||
cfg.DisplayAsciiArt = defaultConfig().DisplayAsciiArt
|
// the struct decode sees the keys.
|
||||||
|
var raw map[string]any
|
||||||
|
if _, err := toml.Decode(string(data), &raw); err != nil {
|
||||||
|
return cfg, nil, nil, fmt.Errorf("parse %s: %w", path, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if !md.IsDefined("key", "always_prompt_delete") {
|
warnings := migrateRawConfig(raw)
|
||||||
cfg.Key.AlwaysPromptDelete = defaultConfig().Key.AlwaysPromptDelete
|
|
||||||
|
// Re-encode the migrated map and decode into the typed struct so
|
||||||
|
// defaults fill any missing fields.
|
||||||
|
var buf bytes.Buffer
|
||||||
|
if err := toml.NewEncoder(&buf).Encode(raw); err != nil {
|
||||||
|
return cfg, nil, nil, fmt.Errorf("parse %s: %w", path, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if !md.IsDefined("store", "default_store_name") || cfg.Store.DefaultStoreName == "" {
|
meta, err := toml.Decode(buf.String(), &cfg)
|
||||||
|
if err != nil {
|
||||||
|
return cfg, nil, nil, fmt.Errorf("parse %s: %w", path, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var undecoded []string
|
||||||
|
for _, key := range meta.Undecoded() {
|
||||||
|
undecoded = append(undecoded, key.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
if cfg.Store.DefaultStoreName == "" {
|
||||||
cfg.Store.DefaultStoreName = defaultConfig().Store.DefaultStoreName
|
cfg.Store.DefaultStoreName = defaultConfig().Store.DefaultStoreName
|
||||||
|
|
||||||
}
|
|
||||||
if !md.IsDefined("store", "always_prompt_delete") {
|
|
||||||
cfg.Store.AlwaysPromptDelete = defaultConfig().Store.AlwaysPromptDelete
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if !md.IsDefined("key", "always_prompt_overwrite") {
|
if cfg.List.DefaultListFormat == "" {
|
||||||
cfg.Key.AlwaysPromptOverwrite = defaultConfig().Key.AlwaysPromptOverwrite
|
cfg.List.DefaultListFormat = defaultConfig().List.DefaultListFormat
|
||||||
|
}
|
||||||
|
if err := validListFormat(cfg.List.DefaultListFormat); err != nil {
|
||||||
|
return cfg, undecoded, warnings, fmt.Errorf("parse %s: list.default_list_format: %w", path, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if !md.IsDefined("git", "auto_fetch") {
|
if cfg.List.DefaultColumns == "" {
|
||||||
cfg.Git.AutoFetch = defaultConfig().Git.AutoFetch
|
cfg.List.DefaultColumns = defaultConfig().List.DefaultColumns
|
||||||
|
}
|
||||||
|
if err := validListColumns(cfg.List.DefaultColumns); err != nil {
|
||||||
|
return cfg, undecoded, warnings, fmt.Errorf("parse %s: list.default_columns: %w", path, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if !md.IsDefined("git", "auto_commit") {
|
if cfg.Git.DefaultCommitMessage == "" {
|
||||||
cfg.Git.AutoCommit = defaultConfig().Git.AutoCommit
|
cfg.Git.DefaultCommitMessage = defaultConfig().Git.DefaultCommitMessage
|
||||||
}
|
}
|
||||||
|
|
||||||
if !md.IsDefined("git", "auto_push") {
|
return cfg, undecoded, warnings, nil
|
||||||
cfg.Git.AutoPush = defaultConfig().Git.AutoPush
|
}
|
||||||
}
|
|
||||||
|
|
||||||
return cfg, nil
|
// validateConfig checks invariants on a Config value before it is persisted.
|
||||||
|
func validateConfig(cfg Config) error {
|
||||||
|
if err := validListFormat(cfg.List.DefaultListFormat); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return validListColumns(cfg.List.DefaultColumns)
|
||||||
}
|
}
|
||||||
|
|
||||||
func configPath() (string, error) {
|
func configPath() (string, error) {
|
||||||
|
|
|
||||||
255
cmd/config_cmd.go
Normal file
255
cmd/config_cmd.go
Normal file
|
|
@ -0,0 +1,255 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path/filepath"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/BurntSushi/toml"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
var configCmd = &cobra.Command{
|
||||||
|
Use: "config",
|
||||||
|
Short: "View and modify configuration",
|
||||||
|
}
|
||||||
|
|
||||||
|
var configListCmd = &cobra.Command{
|
||||||
|
Use: "list",
|
||||||
|
Aliases: []string{"ls"},
|
||||||
|
Short: "List all configuration values",
|
||||||
|
Args: cobra.NoArgs,
|
||||||
|
SilenceUsage: true,
|
||||||
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
|
defaults := defaultConfig()
|
||||||
|
fields := configFields(&config, &defaults)
|
||||||
|
for _, f := range fields {
|
||||||
|
fmt.Printf("%s = %v\n", f.Key, f.Value)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
var configGetCmd = &cobra.Command{
|
||||||
|
Use: "get <key>",
|
||||||
|
Short: "Print a configuration value",
|
||||||
|
Args: cobra.ExactArgs(1),
|
||||||
|
SilenceUsage: true,
|
||||||
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
|
defaults := defaultConfig()
|
||||||
|
fields := configFields(&config, &defaults)
|
||||||
|
f := findConfigField(fields, args[0])
|
||||||
|
if f == nil {
|
||||||
|
err := fmt.Errorf("unknown config key '%s'", args[0])
|
||||||
|
if suggestions := suggestConfigKey(fields, args[0]); len(suggestions) > 0 {
|
||||||
|
return withHint(err, fmt.Sprintf("did you mean '%s'?", strings.Join(suggestions, "', '")))
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
fmt.Printf("%v\n", f.Value)
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
var configPathCmd = &cobra.Command{
|
||||||
|
Use: "path",
|
||||||
|
Short: "Print config file path",
|
||||||
|
Args: cobra.NoArgs,
|
||||||
|
SilenceUsage: true,
|
||||||
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
|
p, err := configPath()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot determine config path: %w", err)
|
||||||
|
}
|
||||||
|
fmt.Println(p)
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
var configEditCmd = &cobra.Command{
|
||||||
|
Use: "edit",
|
||||||
|
Short: "Open config file in $EDITOR",
|
||||||
|
Args: cobra.NoArgs,
|
||||||
|
SilenceUsage: true,
|
||||||
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
|
editor := os.Getenv("EDITOR")
|
||||||
|
if editor == "" {
|
||||||
|
return withHint(
|
||||||
|
fmt.Errorf("EDITOR not set"),
|
||||||
|
"set $EDITOR to your preferred text editor",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
p, err := configPath()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot determine config path: %w", err)
|
||||||
|
}
|
||||||
|
// Create default config if file doesn't exist
|
||||||
|
if _, err := os.Stat(p); os.IsNotExist(err) {
|
||||||
|
if err := writeConfigFile(defaultConfig()); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
c := exec.Command(editor, p)
|
||||||
|
c.Stdin = os.Stdin
|
||||||
|
c.Stdout = os.Stdout
|
||||||
|
c.Stderr = os.Stderr
|
||||||
|
if err := c.Run(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg, undecoded, migrations, err := loadConfig()
|
||||||
|
for _, m := range migrations {
|
||||||
|
if m.Conflict {
|
||||||
|
warnf("both '%s' and '%s' present; using '%s'", m.Old, m.New, m.New)
|
||||||
|
} else {
|
||||||
|
warnf("config key '%s' is deprecated, use '%s'", m.Old, m.New)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
warnf("config has errors: %v", err)
|
||||||
|
printHint("re-run 'pda config edit' to fix")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if len(undecoded) > 0 {
|
||||||
|
warnf("unrecognised key(s) will be ignored: %s", strings.Join(undecoded, ", "))
|
||||||
|
}
|
||||||
|
config = cfg
|
||||||
|
configUndecodedKeys = undecoded
|
||||||
|
configErr = nil
|
||||||
|
okf("saved config: %s", p)
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
func writeConfigFile(cfg Config) error {
|
||||||
|
p, err := configPath()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot determine config path: %w", err)
|
||||||
|
}
|
||||||
|
if err := os.MkdirAll(filepath.Dir(p), 0o750); err != nil {
|
||||||
|
return fmt.Errorf("cannot create config directory: %w", err)
|
||||||
|
}
|
||||||
|
f, err := os.Create(p)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot write config: %w", err)
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
enc := toml.NewEncoder(f)
|
||||||
|
return enc.Encode(cfg)
|
||||||
|
}
|
||||||
|
|
||||||
|
var configInitCmd = &cobra.Command{
|
||||||
|
Use: "init",
|
||||||
|
Short: "Generate default config file",
|
||||||
|
Args: cobra.NoArgs,
|
||||||
|
SilenceUsage: true,
|
||||||
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
|
p, err := configPath()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot determine config path: %w", err)
|
||||||
|
}
|
||||||
|
newFlag, _ := cmd.Flags().GetBool("new")
|
||||||
|
updateFlag, _ := cmd.Flags().GetBool("update")
|
||||||
|
|
||||||
|
if newFlag && updateFlag {
|
||||||
|
return fmt.Errorf("--new and --update are mutually exclusive")
|
||||||
|
}
|
||||||
|
|
||||||
|
if updateFlag {
|
||||||
|
if _, err := os.Stat(p); os.IsNotExist(err) {
|
||||||
|
return withHint(
|
||||||
|
fmt.Errorf("no config file to update"),
|
||||||
|
"use 'pda config init' to create one",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
cfg, _, migrations, loadErr := loadConfig()
|
||||||
|
if loadErr != nil {
|
||||||
|
return fmt.Errorf("cannot update config: %w", loadErr)
|
||||||
|
}
|
||||||
|
if err := writeConfigFile(cfg); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
for _, m := range migrations {
|
||||||
|
okf("%s migrated to %s", m.Old, m.New)
|
||||||
|
}
|
||||||
|
okf("updated config: %s", p)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if !newFlag {
|
||||||
|
if _, err := os.Stat(p); err == nil {
|
||||||
|
return withHint(
|
||||||
|
fmt.Errorf("config file already exists"),
|
||||||
|
"use '--update' to update your config, or '--new' to get a fresh copy",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
okf("generated config: %s", p)
|
||||||
|
return writeConfigFile(defaultConfig())
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
var configSetCmd = &cobra.Command{
|
||||||
|
Use: "set <key> <value>",
|
||||||
|
Short: "Set a configuration value",
|
||||||
|
Args: cobra.ExactArgs(2),
|
||||||
|
SilenceUsage: true,
|
||||||
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
|
key, raw := args[0], args[1]
|
||||||
|
cfg := config
|
||||||
|
defaults := defaultConfig()
|
||||||
|
fields := configFields(&cfg, &defaults)
|
||||||
|
f := findConfigField(fields, key)
|
||||||
|
if f == nil {
|
||||||
|
err := fmt.Errorf("unknown config key '%s'", key)
|
||||||
|
if suggestions := suggestConfigKey(fields, key); len(suggestions) > 0 {
|
||||||
|
return withHint(err, fmt.Sprintf("did you mean '%s'?", strings.Join(suggestions, "', '")))
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
switch f.Kind {
|
||||||
|
case reflect.Bool:
|
||||||
|
switch strings.ToLower(raw) {
|
||||||
|
case "true":
|
||||||
|
f.Field.SetBool(true)
|
||||||
|
case "false":
|
||||||
|
f.Field.SetBool(false)
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("cannot set '%s': expected bool (true/false), got '%s'", key, raw)
|
||||||
|
}
|
||||||
|
case reflect.String:
|
||||||
|
f.Field.SetString(raw)
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("cannot set '%s': unsupported type %s", key, f.Kind)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := validateConfig(cfg); err != nil {
|
||||||
|
return fmt.Errorf("cannot set '%s': %w", key, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := writeConfigFile(cfg); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
config = cfg
|
||||||
|
configUndecodedKeys = nil
|
||||||
|
okf("%s set to '%s'", key, raw)
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
configInitCmd.Flags().Bool("new", false, "overwrite existing config file")
|
||||||
|
configInitCmd.Flags().Bool("update", false, "migrate deprecated keys and fill missing defaults")
|
||||||
|
configCmd.AddCommand(configEditCmd)
|
||||||
|
configCmd.AddCommand(configGetCmd)
|
||||||
|
configCmd.AddCommand(configInitCmd)
|
||||||
|
configCmd.AddCommand(configListCmd)
|
||||||
|
configCmd.AddCommand(configPathCmd)
|
||||||
|
configCmd.AddCommand(configSetCmd)
|
||||||
|
rootCmd.AddCommand(configCmd)
|
||||||
|
}
|
||||||
105
cmd/config_fields.go
Normal file
105
cmd/config_fields.go
Normal file
|
|
@ -0,0 +1,105 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/agnivade/levenshtein"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ConfigField represents a single leaf field in the Config struct,
|
||||||
|
// mapped to its dotted TOML key path.
|
||||||
|
type ConfigField struct {
|
||||||
|
Key string // dotted key, e.g. "git.auto_commit"
|
||||||
|
Value any // current value
|
||||||
|
Default any // value from defaultConfig()
|
||||||
|
IsDefault bool // Value == Default
|
||||||
|
Field reflect.Value // settable reflect.Value (from cfg pointer)
|
||||||
|
Kind reflect.Kind // field type kind
|
||||||
|
}
|
||||||
|
|
||||||
|
// configFields walks cfg and defaults in parallel, returning a ConfigField
|
||||||
|
// for every leaf field. Keys are built from TOML struct tags.
|
||||||
|
func configFields(cfg, defaults *Config) []ConfigField {
|
||||||
|
var fields []ConfigField
|
||||||
|
walk(reflect.ValueOf(cfg).Elem(), reflect.ValueOf(defaults).Elem(), "", &fields)
|
||||||
|
return fields
|
||||||
|
}
|
||||||
|
|
||||||
|
func walk(cv, dv reflect.Value, prefix string, out *[]ConfigField) {
|
||||||
|
ct := cv.Type()
|
||||||
|
for i := 0; i < ct.NumField(); i++ {
|
||||||
|
sf := ct.Field(i)
|
||||||
|
tag := sf.Tag.Get("toml")
|
||||||
|
if tag == "" || tag == "-" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
key := tag
|
||||||
|
if prefix != "" {
|
||||||
|
key = prefix + "." + tag
|
||||||
|
}
|
||||||
|
|
||||||
|
cfv := cv.Field(i)
|
||||||
|
dfv := dv.Field(i)
|
||||||
|
|
||||||
|
if sf.Type.Kind() == reflect.Struct {
|
||||||
|
walk(cfv, dfv, key, out)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
*out = append(*out, ConfigField{
|
||||||
|
Key: key,
|
||||||
|
Value: cfv.Interface(),
|
||||||
|
Default: dfv.Interface(),
|
||||||
|
IsDefault: reflect.DeepEqual(cfv.Interface(), dfv.Interface()),
|
||||||
|
Field: cfv,
|
||||||
|
Kind: sf.Type.Kind(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// findConfigField returns the ConfigField matching the given dotted key,
|
||||||
|
// or nil if not found.
|
||||||
|
func findConfigField(fields []ConfigField, key string) *ConfigField {
|
||||||
|
for i := range fields {
|
||||||
|
if fields[i].Key == key {
|
||||||
|
return &fields[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// suggestConfigKey returns suggestions for a mistyped config key. More generous
|
||||||
|
// than key/store suggestions since the config key space is small (~11 keys).
|
||||||
|
// Normalises spaces to underscores and matches against both the full dotted key
|
||||||
|
// and the leaf segment (part after the last dot).
|
||||||
|
func suggestConfigKey(fields []ConfigField, target string) []string {
|
||||||
|
normalized := strings.ReplaceAll(target, " ", "_")
|
||||||
|
var suggestions []string
|
||||||
|
for _, f := range fields {
|
||||||
|
if matchesConfigKey(normalized, f.Key) {
|
||||||
|
suggestions = append(suggestions, f.Key)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return suggestions
|
||||||
|
}
|
||||||
|
|
||||||
|
func matchesConfigKey(input, key string) bool {
|
||||||
|
// Substring match (either direction)
|
||||||
|
if strings.Contains(key, input) || strings.Contains(input, key) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
// Levenshtein against full dotted key
|
||||||
|
if levenshtein.ComputeDistance(input, key) <= max(len(key)/3, 4) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
// Levenshtein against leaf segment
|
||||||
|
if i := strings.LastIndex(key, "."); i >= 0 {
|
||||||
|
leaf := key[i+1:]
|
||||||
|
if levenshtein.ComputeDistance(input, leaf) <= max(len(leaf)/3, 1) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
144
cmd/config_fields_test.go
Normal file
144
cmd/config_fields_test.go
Normal file
|
|
@ -0,0 +1,144 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"reflect"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestConfigFieldsReturnsAllFields(t *testing.T) {
|
||||||
|
cfg := defaultConfig()
|
||||||
|
defaults := defaultConfig()
|
||||||
|
fields := configFields(&cfg, &defaults)
|
||||||
|
|
||||||
|
// Count expected leaf fields by walking the struct
|
||||||
|
expected := countLeafFields(reflect.TypeOf(Config{}))
|
||||||
|
if len(fields) != expected {
|
||||||
|
t.Errorf("configFields returned %d fields, want %d", len(fields), expected)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func countLeafFields(t reflect.Type) int {
|
||||||
|
n := 0
|
||||||
|
for i := 0; i < t.NumField(); i++ {
|
||||||
|
f := t.Field(i)
|
||||||
|
if f.Type.Kind() == reflect.Struct {
|
||||||
|
n += countLeafFields(f.Type)
|
||||||
|
} else {
|
||||||
|
n++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigFieldsDottedKeys(t *testing.T) {
|
||||||
|
cfg := defaultConfig()
|
||||||
|
defaults := defaultConfig()
|
||||||
|
fields := configFields(&cfg, &defaults)
|
||||||
|
|
||||||
|
want := map[string]bool{
|
||||||
|
"display_ascii_art": true,
|
||||||
|
"key.always_prompt_delete": true,
|
||||||
|
"key.always_prompt_glob_delete": true,
|
||||||
|
"key.always_prompt_overwrite": true,
|
||||||
|
"key.always_encrypt": true,
|
||||||
|
"store.default_store_name": true,
|
||||||
|
"store.always_prompt_delete": true,
|
||||||
|
"store.always_prompt_overwrite": true,
|
||||||
|
"list.always_show_all_stores": true,
|
||||||
|
"list.default_list_format": true,
|
||||||
|
"list.always_show_full_values": true,
|
||||||
|
"list.always_hide_header": true,
|
||||||
|
"list.default_columns": true,
|
||||||
|
"git.auto_fetch": true,
|
||||||
|
"git.auto_commit": true,
|
||||||
|
"git.auto_push": true,
|
||||||
|
"git.default_commit_message": true,
|
||||||
|
}
|
||||||
|
|
||||||
|
got := make(map[string]bool)
|
||||||
|
for _, f := range fields {
|
||||||
|
got[f.Key] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
for k := range want {
|
||||||
|
if !got[k] {
|
||||||
|
t.Errorf("missing key %q", k)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for k := range got {
|
||||||
|
if !want[k] {
|
||||||
|
t.Errorf("unexpected key %q", k)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigFieldsAllDefaults(t *testing.T) {
|
||||||
|
cfg := defaultConfig()
|
||||||
|
defaults := defaultConfig()
|
||||||
|
fields := configFields(&cfg, &defaults)
|
||||||
|
|
||||||
|
for _, f := range fields {
|
||||||
|
if !f.IsDefault {
|
||||||
|
t.Errorf("field %q should be default, got Value=%v Default=%v", f.Key, f.Value, f.Default)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigFieldsDetectsNonDefault(t *testing.T) {
|
||||||
|
cfg := defaultConfig()
|
||||||
|
cfg.Git.AutoCommit = true
|
||||||
|
defaults := defaultConfig()
|
||||||
|
fields := configFields(&cfg, &defaults)
|
||||||
|
|
||||||
|
for _, f := range fields {
|
||||||
|
if f.Key == "git.auto_commit" {
|
||||||
|
if f.IsDefault {
|
||||||
|
t.Errorf("git.auto_commit should not be default after change")
|
||||||
|
}
|
||||||
|
if f.Value != true {
|
||||||
|
t.Errorf("git.auto_commit Value = %v, want true", f.Value)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
t.Error("git.auto_commit not found in fields")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigFieldsSettable(t *testing.T) {
|
||||||
|
cfg := defaultConfig()
|
||||||
|
defaults := defaultConfig()
|
||||||
|
fields := configFields(&cfg, &defaults)
|
||||||
|
|
||||||
|
for _, f := range fields {
|
||||||
|
if f.Key == "git.auto_push" {
|
||||||
|
if f.Kind != reflect.Bool {
|
||||||
|
t.Errorf("git.auto_push Kind = %v, want Bool", f.Kind)
|
||||||
|
}
|
||||||
|
f.Field.SetBool(true)
|
||||||
|
if !cfg.Git.AutoPush {
|
||||||
|
t.Error("setting field via reflect did not update cfg")
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
t.Error("git.auto_push not found in fields")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfigFieldsStringField(t *testing.T) {
|
||||||
|
cfg := defaultConfig()
|
||||||
|
defaults := defaultConfig()
|
||||||
|
fields := configFields(&cfg, &defaults)
|
||||||
|
|
||||||
|
for _, f := range fields {
|
||||||
|
if f.Key == "store.default_store_name" {
|
||||||
|
if f.Kind != reflect.String {
|
||||||
|
t.Errorf("store.default_store_name Kind = %v, want String", f.Kind)
|
||||||
|
}
|
||||||
|
if f.Value != "store" {
|
||||||
|
t.Errorf("store.default_store_name Value = %v, want 'store'", f.Value)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
t.Error("store.default_store_name not found in fields")
|
||||||
|
}
|
||||||
92
cmd/config_migrate.go
Normal file
92
cmd/config_migrate.go
Normal file
|
|
@ -0,0 +1,92 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
// deprecation maps a renamed config key to its replacement; entries in the
// deprecations table drive migrateRawConfig.
type deprecation struct {
	Old string // e.g. "list.list_all_stores"
	New string // e.g. "list.always_show_all_stores"
}
|
||||||
|
|
||||||
|
// migration records one key rename applied to a raw config map, for
// reporting back to the user.
type migration struct {
	Old      string // key that was removed
	New      string // key that holds the value
	Conflict bool   // both old and new were present; new key wins
}
|
||||||
|
|
||||||
|
// deprecations lists every renamed config key still accepted from old
// config files, mapped to its current name.
var deprecations = []deprecation{
	{"list.list_all_stores", "list.always_show_all_stores"},
}
|
||||||
|
|
||||||
|
func migrateRawConfig(raw map[string]any) []migration {
|
||||||
|
var migrations []migration
|
||||||
|
for _, dep := range deprecations {
|
||||||
|
oldParts := strings.Split(dep.Old, ".")
|
||||||
|
newParts := strings.Split(dep.New, ".")
|
||||||
|
|
||||||
|
_, ok := nestedGet(raw, oldParts)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
m := migration{Old: dep.Old, New: dep.New}
|
||||||
|
if _, exists := nestedGet(raw, newParts); exists {
|
||||||
|
m.Conflict = true
|
||||||
|
} else {
|
||||||
|
nestedSet(raw, newParts, nestedMustGet(raw, oldParts))
|
||||||
|
}
|
||||||
|
nestedDelete(raw, oldParts)
|
||||||
|
migrations = append(migrations, m)
|
||||||
|
}
|
||||||
|
return migrations
|
||||||
|
}
|
||||||
|
|
||||||
|
func nestedMustGet(m map[string]any, parts []string) any {
|
||||||
|
v, _ := nestedGet(m, parts)
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
|
||||||
|
// nestedGet walks m along the given path segments and returns the value at
// the final segment. It reports false when any segment is missing, when an
// intermediate value is not a nested map, or when parts is empty.
func nestedGet(m map[string]any, parts []string) (any, bool) {
	cur := m
	for idx := 0; idx < len(parts); idx++ {
		val, present := cur[parts[idx]]
		if !present {
			return nil, false
		}
		if idx == len(parts)-1 {
			return val, true
		}
		next, isMap := val.(map[string]any)
		if !isMap {
			return nil, false
		}
		cur = next
	}
	return nil, false
}
|
||||||
|
|
||||||
|
// nestedSet stores val in m at the given path segments, creating intermediate
// maps as needed. An intermediate value that is not a map is replaced with a
// fresh nested map. An empty path is a no-op.
func nestedSet(m map[string]any, parts []string, val any) {
	if len(parts) == 0 {
		return
	}
	cur := m
	last := len(parts) - 1
	for _, seg := range parts[:last] {
		child, isMap := cur[seg].(map[string]any)
		if !isMap {
			child = make(map[string]any)
			cur[seg] = child
		}
		cur = child
	}
	cur[parts[last]] = val
}
|
||||||
|
|
||||||
|
// nestedDelete removes the value at the given path segments from m. It is a
// no-op when the path is empty or any intermediate segment is missing or not
// a nested map; sibling entries are left untouched.
func nestedDelete(m map[string]any, parts []string) {
	if len(parts) == 0 {
		return
	}
	cur := m
	for _, seg := range parts[:len(parts)-1] {
		child, isMap := cur[seg].(map[string]any)
		if !isMap {
			return
		}
		cur = child
	}
	delete(cur, parts[len(parts)-1])
}
|
||||||
|
|
@ -31,43 +31,47 @@ import (
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
// delDbCmd represents the set command
|
// delStoreCmd represents the set command
|
||||||
var delDbCmd = &cobra.Command{
|
var delStoreCmd = &cobra.Command{
|
||||||
Use: "del-db DB",
|
Use: "remove-store STORE",
|
||||||
Short: "Delete a database.",
|
Short: "Delete a store",
|
||||||
Aliases: []string{"delete-db", "rm-db", "remove-db"},
|
Aliases: []string{"rms"},
|
||||||
Args: cobra.ExactArgs(1),
|
Args: cobra.ExactArgs(1),
|
||||||
RunE: delDb,
|
ValidArgsFunction: completeStores,
|
||||||
SilenceUsage: true,
|
RunE: delStore,
|
||||||
|
SilenceUsage: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
func delDb(cmd *cobra.Command, args []string) error {
|
func delStore(cmd *cobra.Command, args []string) error {
|
||||||
store := &Store{}
|
store := &Store{}
|
||||||
dbName, err := store.parseDB(args[0], false)
|
dbName, err := store.parseDB(args[0], false)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot delete-db '%s': %v", args[0], err)
|
return fmt.Errorf("cannot delete store '%s': %v", args[0], err)
|
||||||
}
|
}
|
||||||
var notFound errNotFound
|
var notFound errNotFound
|
||||||
path, err := store.FindStore(dbName)
|
path, err := store.FindStore(dbName)
|
||||||
if errors.As(err, ¬Found) {
|
if errors.As(err, ¬Found) {
|
||||||
return fmt.Errorf("cannot delete-db '%s': %v", dbName, err)
|
return fmt.Errorf("cannot delete store '%s': %w", dbName, err)
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot delete-db '%s': %v", dbName, err)
|
return fmt.Errorf("cannot delete store '%s': %v", dbName, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
interactive, err := cmd.Flags().GetBool("interactive")
|
interactive, err := cmd.Flags().GetBool("interactive")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot delete-db '%s': %v", dbName, err)
|
return fmt.Errorf("cannot delete store '%s': %v", dbName, err)
|
||||||
|
}
|
||||||
|
yes, err := cmd.Flags().GetBool("yes")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot delete store '%s': %v", dbName, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if interactive || config.Store.AlwaysPromptDelete {
|
if !yes && (interactive || config.Store.AlwaysPromptDelete) {
|
||||||
message := fmt.Sprintf("delete-db '%s': are you sure? (y/n)", args[0])
|
promptf("delete store '%s'? (y/n)", args[0])
|
||||||
fmt.Println(message)
|
|
||||||
|
|
||||||
var confirm string
|
var confirm string
|
||||||
if _, err := fmt.Scanln(&confirm); err != nil {
|
if err := scanln(&confirm); err != nil {
|
||||||
return fmt.Errorf("cannot delete-db '%s': %v", dbName, err)
|
return fmt.Errorf("cannot delete store '%s': %v", dbName, err)
|
||||||
}
|
}
|
||||||
if strings.ToLower(confirm) != "y" {
|
if strings.ToLower(confirm) != "y" {
|
||||||
return nil
|
return nil
|
||||||
|
|
@ -76,17 +80,18 @@ func delDb(cmd *cobra.Command, args []string) error {
|
||||||
if err := executeDeletion(path); err != nil {
|
if err := executeDeletion(path); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
return autoSync()
|
return autoSync(fmt.Sprintf("removed @%s", dbName))
|
||||||
}
|
}
|
||||||
|
|
||||||
func executeDeletion(path string) error {
|
func executeDeletion(path string) error {
|
||||||
if err := os.RemoveAll(path); err != nil {
|
if err := os.Remove(path); err != nil {
|
||||||
return fmt.Errorf("cannot delete-db '%s': %v", path, err)
|
return fmt.Errorf("cannot delete store '%s': %v", path, err)
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
delDbCmd.Flags().BoolP("interactive", "i", false, "Prompt yes/no for each deletion")
|
delStoreCmd.Flags().BoolP("interactive", "i", false, "prompt yes/no for each deletion")
|
||||||
rootCmd.AddCommand(delDbCmd)
|
delStoreCmd.Flags().BoolP("yes", "y", false, "skip all confirmation prompts")
|
||||||
|
rootCmd.AddCommand(delStoreCmd)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
224
cmd/del.go
224
cmd/del.go
|
|
@ -23,22 +23,21 @@ THE SOFTWARE.
|
||||||
package cmd
|
package cmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"errors"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/dgraph-io/badger/v4"
|
|
||||||
"github.com/gobwas/glob"
|
"github.com/gobwas/glob"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
// delCmd represents the set command
|
// delCmd represents the remove command
|
||||||
var delCmd = &cobra.Command{
|
var delCmd = &cobra.Command{
|
||||||
Use: "del KEY[@DB] [KEY[@DB] ...]",
|
Use: "remove KEY[@STORE] [KEY[@STORE] ...]",
|
||||||
Short: "Delete one or more keys. Optionally specify a db.",
|
Short: "Delete one or more keys",
|
||||||
Aliases: []string{"delete", "rm", "remove"},
|
Aliases: []string{"rm"},
|
||||||
Args: cobra.ArbitraryArgs,
|
Args: cobra.ArbitraryArgs,
|
||||||
RunE: del,
|
ValidArgsFunction: completeKeys,
|
||||||
|
RunE: del,
|
||||||
SilenceUsage: true,
|
SilenceUsage: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -49,121 +48,133 @@ func del(cmd *cobra.Command, args []string) error {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
globPatterns, err := cmd.Flags().GetStringSlice("glob")
|
yes, err := cmd.Flags().GetBool("yes")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
separators, err := parseGlobSeparators(cmd)
|
keyPatterns, err := cmd.Flags().GetStringSlice("key")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
valuePatterns, err := cmd.Flags().GetStringSlice("value")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
storePatterns, err := cmd.Flags().GetStringSlice("store")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(args) == 0 && len(globPatterns) == 0 {
|
hasFilters := len(keyPatterns) > 0 || len(valuePatterns) > 0 || len(storePatterns) > 0
|
||||||
|
if len(args) == 0 && !hasFilters {
|
||||||
return fmt.Errorf("cannot remove: no keys provided")
|
return fmt.Errorf("cannot remove: no keys provided")
|
||||||
}
|
}
|
||||||
|
|
||||||
targets, err := resolveDeleteTargets(store, args, globPatterns, separators)
|
targets, err := resolveDeleteTargets(store, args, keyPatterns, valuePatterns, storePatterns)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(targets) == 0 {
|
if len(targets) == 0 {
|
||||||
return fmt.Errorf("cannot remove: No such key")
|
return fmt.Errorf("cannot remove: no such key")
|
||||||
}
|
}
|
||||||
|
|
||||||
var processed []resolvedTarget
|
// Group targets by store for batch deletes.
|
||||||
|
type storeTargets struct {
|
||||||
|
targets []resolvedTarget
|
||||||
|
}
|
||||||
|
byStore := make(map[string]*storeTargets)
|
||||||
|
var storeOrder []string
|
||||||
|
promptGlob := hasFilters && config.Key.AlwaysPromptGlobDelete
|
||||||
for _, target := range targets {
|
for _, target := range targets {
|
||||||
if interactive || config.Key.AlwaysPromptDelete {
|
if !yes && (interactive || config.Key.AlwaysPromptDelete || promptGlob) {
|
||||||
var confirm string
|
var confirm string
|
||||||
message := fmt.Sprintf("remove %q: are you sure? (y/n)", target.display)
|
promptf("remove '%s'? (y/n)", target.display)
|
||||||
fmt.Println(message)
|
if err := scanln(&confirm); err != nil {
|
||||||
if _, err := fmt.Scanln(&confirm); err != nil {
|
|
||||||
return fmt.Errorf("cannot remove '%s': %v", target.full, err)
|
return fmt.Errorf("cannot remove '%s': %v", target.full, err)
|
||||||
}
|
}
|
||||||
if strings.ToLower(confirm) != "y" {
|
if strings.ToLower(confirm) != "y" {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
trans := TransactionArgs{
|
if _, ok := byStore[target.db]; !ok {
|
||||||
key: target.full,
|
byStore[target.db] = &storeTargets{}
|
||||||
readonly: false,
|
storeOrder = append(storeOrder, target.db)
|
||||||
sync: false,
|
|
||||||
transact: func(tx *badger.Txn, k []byte) error {
|
|
||||||
if err := tx.Delete(k); errors.Is(err, badger.ErrKeyNotFound) {
|
|
||||||
return fmt.Errorf("cannot remove '%s': No such key", target.full)
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("cannot remove '%s': %v", target.full, err)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
byStore[target.db].targets = append(byStore[target.db].targets, target)
|
||||||
if err := store.Transaction(trans); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
processed = append(processed, target)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(processed) == 0 {
|
if len(byStore) == 0 {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
var dbs []string
|
force, _ := cmd.Flags().GetBool("force")
|
||||||
var labels []string
|
|
||||||
for _, t := range processed {
|
var removedNames []string
|
||||||
spec, err := store.parseKey(t.full, true)
|
for _, dbName := range storeOrder {
|
||||||
|
st := byStore[dbName]
|
||||||
|
p, err := store.storePath(dbName)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
dbs = append(dbs, spec.DB)
|
entries, err := readStoreFile(p, nil)
|
||||||
labels = append(labels, t.display)
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
for _, t := range st.targets {
|
||||||
|
idx := findEntry(entries, t.key)
|
||||||
|
if idx < 0 {
|
||||||
|
return fmt.Errorf("cannot remove '%s': no such key", t.full)
|
||||||
|
}
|
||||||
|
if entries[idx].ReadOnly && !force {
|
||||||
|
return fmt.Errorf("cannot remove '%s': key is read-only", t.full)
|
||||||
|
}
|
||||||
|
entries = append(entries[:idx], entries[idx+1:]...)
|
||||||
|
removedNames = append(removedNames, t.display)
|
||||||
|
}
|
||||||
|
if err := writeStoreFile(p, entries, nil); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return autoSync()
|
|
||||||
|
return autoSync("removed " + strings.Join(removedNames, ", "))
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
delCmd.Flags().BoolP("interactive", "i", false, "Prompt yes/no for each deletion")
|
delCmd.Flags().BoolP("interactive", "i", false, "prompt yes/no for each deletion")
|
||||||
delCmd.Flags().StringSliceP("glob", "g", nil, "Delete keys matching glob pattern (repeatable)")
|
delCmd.Flags().BoolP("yes", "y", false, "skip all confirmation prompts")
|
||||||
delCmd.Flags().String("glob-sep", "", fmt.Sprintf("Characters treated as separators for globbing (default %q)", defaultGlobSeparatorsDisplay()))
|
delCmd.Flags().Bool("force", false, "bypass read-only protection")
|
||||||
|
delCmd.Flags().StringSliceP("key", "k", nil, "delete keys matching glob pattern (repeatable)")
|
||||||
|
delCmd.Flags().StringSliceP("store", "s", nil, "target stores matching glob pattern (repeatable)")
|
||||||
|
delCmd.RegisterFlagCompletionFunc("store", completeStoreFlag)
|
||||||
|
delCmd.Flags().StringSliceP("value", "v", nil, "delete entries matching value glob pattern (repeatable)")
|
||||||
rootCmd.AddCommand(delCmd)
|
rootCmd.AddCommand(delCmd)
|
||||||
}
|
}
|
||||||
|
|
||||||
type resolvedTarget struct {
|
type resolvedTarget struct {
|
||||||
full string
|
full string
|
||||||
display string
|
display string
|
||||||
|
key string
|
||||||
|
db string
|
||||||
}
|
}
|
||||||
|
|
||||||
func keyExists(store *Store, arg string) (bool, error) {
|
func keyExists(store *Store, arg string) (bool, error) {
|
||||||
var notFound bool
|
|
||||||
trans := TransactionArgs{
|
|
||||||
key: arg,
|
|
||||||
readonly: true,
|
|
||||||
sync: false,
|
|
||||||
transact: func(tx *badger.Txn, k []byte) error {
|
|
||||||
if _, err := tx.Get(k); errors.Is(err, badger.ErrKeyNotFound) {
|
|
||||||
notFound = true
|
|
||||||
return nil
|
|
||||||
} else {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
if err := store.Transaction(trans); err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
return !notFound, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func formatKeyForPrompt(store *Store, arg string) (string, error) {
|
|
||||||
spec, err := store.parseKey(arg, true)
|
spec, err := store.parseKey(arg, true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return false, err
|
||||||
}
|
}
|
||||||
return spec.Display(), nil
|
p, err := store.storePath(spec.DB)
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
entries, err := readStoreFile(p, nil)
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
return findEntry(entries, spec.Key) >= 0, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func resolveDeleteTargets(store *Store, exactArgs []string, globPatterns []string, separators []rune) ([]resolvedTarget, error) {
|
func resolveDeleteTargets(store *Store, exactArgs []string, globPatterns []string, valuePatterns []string, storePatterns []string) ([]resolvedTarget, error) {
|
||||||
targetSet := make(map[string]struct{})
|
targetSet := make(map[string]struct{})
|
||||||
var targets []resolvedTarget
|
var targets []resolvedTarget
|
||||||
|
|
||||||
|
|
@ -176,6 +187,8 @@ func resolveDeleteTargets(store *Store, exactArgs []string, globPatterns []strin
|
||||||
targets = append(targets, resolvedTarget{
|
targets = append(targets, resolvedTarget{
|
||||||
full: full,
|
full: full,
|
||||||
display: spec.Display(),
|
display: spec.Display(),
|
||||||
|
key: spec.Key,
|
||||||
|
db: spec.DB,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -194,16 +207,32 @@ func resolveDeleteTargets(store *Store, exactArgs []string, globPatterns []strin
|
||||||
addTarget(spec)
|
addTarget(spec)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(globPatterns) == 0 {
|
if len(globPatterns) == 0 && len(valuePatterns) == 0 && len(storePatterns) == 0 {
|
||||||
return targets, nil
|
return targets, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Resolve --store patterns into a list of target stores.
|
||||||
|
storeMatchers, err := compileGlobMatchers(storePatterns)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("cannot remove: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
valueMatchers, err := compileValueMatchers(valuePatterns)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("cannot remove: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
type compiledPattern struct {
|
type compiledPattern struct {
|
||||||
rawArg string
|
rawArg string
|
||||||
db string
|
db string
|
||||||
matcher glob.Glob
|
matcher glob.Glob
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// When --store or --value is given without --key, match all keys.
|
||||||
|
if len(globPatterns) == 0 {
|
||||||
|
globPatterns = []string{"**"}
|
||||||
|
}
|
||||||
|
|
||||||
var compiled []compiledPattern
|
var compiled []compiledPattern
|
||||||
for _, raw := range globPatterns {
|
for _, raw := range globPatterns {
|
||||||
spec, err := store.parseKey(raw, true)
|
spec, err := store.parseKey(raw, true)
|
||||||
|
|
@ -211,41 +240,54 @@ func resolveDeleteTargets(store *Store, exactArgs []string, globPatterns []strin
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
pattern := spec.Key
|
pattern := spec.Key
|
||||||
m, err := glob.Compile(pattern, separators...)
|
m, err := glob.Compile(pattern, defaultGlobSeparators...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("cannot remove '%s': %v", raw, err)
|
return nil, fmt.Errorf("cannot remove '%s': %v", raw, err)
|
||||||
}
|
}
|
||||||
compiled = append(compiled, compiledPattern{
|
if len(storeMatchers) > 0 && !strings.Contains(raw, "@") {
|
||||||
rawArg: raw,
|
// --store given and pattern has no explicit @STORE: expand across matching stores.
|
||||||
db: spec.DB,
|
allStores, err := store.AllStores()
|
||||||
matcher: m,
|
if err != nil {
|
||||||
})
|
return nil, fmt.Errorf("cannot remove: %v", err)
|
||||||
|
}
|
||||||
|
for _, s := range allStores {
|
||||||
|
if globMatch(storeMatchers, s) {
|
||||||
|
compiled = append(compiled, compiledPattern{rawArg: raw, db: s, matcher: m})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
compiled = append(compiled, compiledPattern{rawArg: raw, db: spec.DB, matcher: m})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
keysByDB := make(map[string][]string)
|
entriesByDB := make(map[string][]Entry)
|
||||||
getKeys := func(db string) ([]string, error) {
|
getEntries := func(db string) ([]Entry, error) {
|
||||||
if keys, ok := keysByDB[db]; ok {
|
if entries, ok := entriesByDB[db]; ok {
|
||||||
return keys, nil
|
return entries, nil
|
||||||
}
|
}
|
||||||
keys, err := store.Keys(db)
|
p, err := store.storePath(db)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
keysByDB[db] = keys
|
entries, err := readStoreFile(p, nil)
|
||||||
return keys, nil
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
entriesByDB[db] = entries
|
||||||
|
return entries, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, p := range compiled {
|
for _, p := range compiled {
|
||||||
keys, err := getKeys(p.db)
|
entries, err := getEntries(p.db)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("cannot remove '%s': %v", p.rawArg, err)
|
return nil, fmt.Errorf("cannot remove '%s': %v", p.rawArg, err)
|
||||||
}
|
}
|
||||||
for _, k := range keys {
|
for _, e := range entries {
|
||||||
if p.matcher.Match(k) {
|
if p.matcher.Match(e.Key) && valueMatch(valueMatchers, e) {
|
||||||
addTarget(KeySpec{
|
addTarget(KeySpec{
|
||||||
Raw: k,
|
Raw: e.Key,
|
||||||
RawKey: k,
|
RawKey: e.Key,
|
||||||
Key: k,
|
Key: e.Key,
|
||||||
DB: p.db,
|
DB: p.db,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
|
||||||
361
cmd/doctor.go
Normal file
361
cmd/doctor.go
Normal file
|
|
@ -0,0 +1,361 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path/filepath"
|
||||||
|
"runtime"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
"golang.org/x/term"
|
||||||
|
)
|
||||||
|
|
||||||
|
// doctorCmd implements `doctor`, which prints a health report for the
// environment (version/platform, git, shell, config, data dir, identity).
var doctorCmd = &cobra.Command{
	Use:          "doctor",
	Short:        "Check environment health",
	RunE:         doctor,
	SilenceUsage: true,
}
||||||
|
|
||||||
|
// init registers the doctor command on the root command.
func init() {
	rootCmd.AddCommand(doctorCmd)
}
||||||
|
|
||||||
|
func doctor(cmd *cobra.Command, args []string) error {
|
||||||
|
if runDoctor(os.Stdout) {
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func runDoctor(w io.Writer) bool {
|
||||||
|
tty := false
|
||||||
|
if f, ok := w.(*os.File); ok {
|
||||||
|
tty = term.IsTerminal(int(f.Fd()))
|
||||||
|
}
|
||||||
|
hasError := false
|
||||||
|
|
||||||
|
lastFail := false
|
||||||
|
|
||||||
|
emit := func(level, msg string) {
|
||||||
|
var code string
|
||||||
|
switch level {
|
||||||
|
case "ok":
|
||||||
|
code = "32"
|
||||||
|
lastFail = false
|
||||||
|
case "WARN":
|
||||||
|
code = "33"
|
||||||
|
lastFail = false
|
||||||
|
case "FAIL":
|
||||||
|
code = "31"
|
||||||
|
hasError = true
|
||||||
|
lastFail = true
|
||||||
|
}
|
||||||
|
if lastFail && tty {
|
||||||
|
fmt.Fprintf(w, "%s \033[1m%s\033[0m\n", keyword(code, level, tty), msg)
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(w, "%s %s\n", keyword(code, level, tty), msg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tree := func(items []string) {
|
||||||
|
for i, item := range items {
|
||||||
|
connector := "├── "
|
||||||
|
if i == len(items)-1 {
|
||||||
|
connector = "└── "
|
||||||
|
}
|
||||||
|
if lastFail && tty {
|
||||||
|
fmt.Fprintf(w, "\033[1m %s%s\033[0m\n", connector, item)
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(w, " %s%s\n", connector, item)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 1. Version + platform
|
||||||
|
emit("ok", fmt.Sprintf("%s (%s/%s)", version, runtime.GOOS, runtime.GOARCH))
|
||||||
|
|
||||||
|
// 2. OS detail
|
||||||
|
switch runtime.GOOS {
|
||||||
|
case "linux":
|
||||||
|
if out, err := exec.Command("uname", "-r").Output(); err == nil {
|
||||||
|
emit("ok", fmt.Sprintf("OS: Linux %s", strings.TrimSpace(string(out))))
|
||||||
|
}
|
||||||
|
case "darwin":
|
||||||
|
if out, err := exec.Command("sw_vers", "-productVersion").Output(); err == nil {
|
||||||
|
emit("ok", fmt.Sprintf("OS: macOS %s", strings.TrimSpace(string(out))))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Go version
|
||||||
|
emit("ok", fmt.Sprintf("Go: %s", runtime.Version()))
|
||||||
|
|
||||||
|
// 4. Git dependency
|
||||||
|
gitAvailable := false
|
||||||
|
if out, err := exec.Command("git", "--version").Output(); err == nil {
|
||||||
|
gitVer := strings.TrimSpace(string(out))
|
||||||
|
if after, ok := strings.CutPrefix(gitVer, "git version "); ok {
|
||||||
|
gitVer = after
|
||||||
|
}
|
||||||
|
emit("ok", fmt.Sprintf("Git: %s", gitVer))
|
||||||
|
gitAvailable = true
|
||||||
|
} else {
|
||||||
|
emit("WARN", "git not found on PATH")
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5. Shell
|
||||||
|
if shell := os.Getenv("SHELL"); shell != "" {
|
||||||
|
emit("ok", fmt.Sprintf("Shell: %s", shell))
|
||||||
|
} else {
|
||||||
|
emit("WARN", "SHELL not set")
|
||||||
|
}
|
||||||
|
|
||||||
|
// 6. Config directory and file
|
||||||
|
cfgPath, err := configPath()
|
||||||
|
if err != nil {
|
||||||
|
emit("FAIL", fmt.Sprintf("Cannot determine config path: %v", err))
|
||||||
|
} else {
|
||||||
|
cfgDir := filepath.Dir(cfgPath)
|
||||||
|
envSuffix := ""
|
||||||
|
if os.Getenv("PDA_CONFIG") != "" {
|
||||||
|
envSuffix = " (PDA_CONFIG)"
|
||||||
|
}
|
||||||
|
|
||||||
|
var issues []string
|
||||||
|
if _, statErr := os.Stat(cfgPath); statErr != nil && !os.IsNotExist(statErr) {
|
||||||
|
issues = append(issues, fmt.Sprintf("Config file unreadable: %s", cfgPath))
|
||||||
|
}
|
||||||
|
if configErr != nil {
|
||||||
|
issues = append(issues, fmt.Sprintf("Parse error: %v", configErr))
|
||||||
|
issues = append(issues, "While broken, ONLY 'doctor', 'config edit', and 'config init' will function")
|
||||||
|
issues = append(issues, "Fix with 'pda config edit' or 'pda config init --new'")
|
||||||
|
}
|
||||||
|
if unexpectedFiles(cfgDir, map[string]bool{
|
||||||
|
"config.toml": true,
|
||||||
|
}) {
|
||||||
|
issues = append(issues, "Unexpected file(s) in directory")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(issues) > 0 {
|
||||||
|
emit("FAIL", fmt.Sprintf("Config: %s%s", cfgDir, envSuffix))
|
||||||
|
tree(issues)
|
||||||
|
} else {
|
||||||
|
emit("ok", fmt.Sprintf("Config: %s%s", cfgDir, envSuffix))
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, statErr := os.Stat(cfgPath); os.IsNotExist(statErr) {
|
||||||
|
emit("ok", "Using default configuration")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 7. Non-default config values (skip if config failed to parse)
|
||||||
|
if configErr == nil {
|
||||||
|
defaults := defaultConfig()
|
||||||
|
if diffs := configDiffStrings(configFields(&config, &defaults)); len(diffs) > 0 {
|
||||||
|
emit("ok", "Non-default config:")
|
||||||
|
tree(diffs)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 7b. Unrecognised config keys
|
||||||
|
if len(configUndecodedKeys) > 0 {
|
||||||
|
emit("WARN", fmt.Sprintf("Unrecognised config key(s) (ignored):"))
|
||||||
|
tree(configUndecodedKeys)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 8. Data directory
|
||||||
|
store := &Store{}
|
||||||
|
dataDir, err := store.path()
|
||||||
|
if err != nil {
|
||||||
|
emit("FAIL", fmt.Sprintf("Data directory inaccessible: %v", err))
|
||||||
|
} else {
|
||||||
|
envSuffix := ""
|
||||||
|
if os.Getenv("PDA_DATA") != "" {
|
||||||
|
envSuffix = " (PDA_DATA)"
|
||||||
|
}
|
||||||
|
|
||||||
|
if unexpectedDataFiles(dataDir) {
|
||||||
|
emit("FAIL", fmt.Sprintf("Data: %s%s", dataDir, envSuffix))
|
||||||
|
tree([]string{"Unexpected file(s) in directory"})
|
||||||
|
} else {
|
||||||
|
emit("ok", fmt.Sprintf("Data: %s%s", dataDir, envSuffix))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 9. Identity file
|
||||||
|
idPath, err := identityPath()
|
||||||
|
if err != nil {
|
||||||
|
emit("FAIL", fmt.Sprintf("Cannot determine identity path: %v", err))
|
||||||
|
} else if _, err := os.Stat(idPath); os.IsNotExist(err) {
|
||||||
|
emit("WARN", "No identity file found")
|
||||||
|
} else if err != nil {
|
||||||
|
emit("FAIL", fmt.Sprintf("Cannot access identity file: %v", err))
|
||||||
|
} else {
|
||||||
|
info, _ := os.Stat(idPath)
|
||||||
|
emit("ok", fmt.Sprintf("Identity: %s", idPath))
|
||||||
|
if perm := info.Mode().Perm(); perm != 0o600 {
|
||||||
|
emit("WARN", fmt.Sprintf("Identity file permissions %04o (should be 0600)", perm))
|
||||||
|
}
|
||||||
|
if _, loadErr := loadIdentity(); loadErr != nil {
|
||||||
|
emit("WARN", fmt.Sprintf("Identity file invalid: %v", loadErr))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 10. Git initialised
|
||||||
|
gitInitialised := false
|
||||||
|
if dataDir != "" {
|
||||||
|
gitDir := filepath.Join(dataDir, ".git")
|
||||||
|
if _, err := os.Stat(gitDir); os.IsNotExist(err) {
|
||||||
|
emit("WARN", "Git not initialised")
|
||||||
|
} else if err != nil {
|
||||||
|
emit("FAIL", fmt.Sprintf("Cannot check git status: %v", err))
|
||||||
|
} else {
|
||||||
|
gitInitialised = true
|
||||||
|
branch, _ := currentBranch(dataDir)
|
||||||
|
if branch == "" {
|
||||||
|
branch = "unknown"
|
||||||
|
}
|
||||||
|
emit("ok", fmt.Sprintf("Git initialised on %s", branch))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 11. Git uncommitted changes (only if git initialised)
|
||||||
|
if gitInitialised && gitAvailable {
|
||||||
|
ucCmd := exec.Command("git", "status", "--porcelain")
|
||||||
|
ucCmd.Dir = dataDir
|
||||||
|
if out, err := ucCmd.Output(); err == nil {
|
||||||
|
if trimmed := strings.TrimSpace(string(out)); trimmed != "" {
|
||||||
|
count := len(strings.Split(trimmed, "\n"))
|
||||||
|
emit("WARN", fmt.Sprintf("Git %d file(s) with uncommitted changes", count))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 12. Git remote (only if git initialised)
|
||||||
|
hasOrigin := false
|
||||||
|
if gitInitialised {
|
||||||
|
var err error
|
||||||
|
hasOrigin, err = repoHasRemote(dataDir, "origin")
|
||||||
|
if err != nil {
|
||||||
|
emit("FAIL", fmt.Sprintf("Cannot check git remote: %v", err))
|
||||||
|
} else if hasOrigin {
|
||||||
|
emit("ok", "Git remote configured")
|
||||||
|
} else {
|
||||||
|
emit("WARN", "No git remote configured")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 13. Git sync (only if git initialised AND remote exists)
|
||||||
|
if gitInitialised && hasOrigin && gitAvailable {
|
||||||
|
info, err := repoRemoteInfo(dataDir)
|
||||||
|
if err != nil || info.Ref == "" {
|
||||||
|
emit("WARN", "Git sync status unknown")
|
||||||
|
} else {
|
||||||
|
ahead, behind, err := repoAheadBehind(dataDir, info.Ref)
|
||||||
|
if err != nil {
|
||||||
|
emit("WARN", "Git sync status unknown")
|
||||||
|
} else if ahead == 0 && behind == 0 {
|
||||||
|
emit("ok", "Git in sync with remote")
|
||||||
|
} else {
|
||||||
|
var parts []string
|
||||||
|
if ahead > 0 {
|
||||||
|
parts = append(parts, fmt.Sprintf("%d ahead", ahead))
|
||||||
|
}
|
||||||
|
if behind > 0 {
|
||||||
|
parts = append(parts, fmt.Sprintf("%d behind", behind))
|
||||||
|
}
|
||||||
|
emit("WARN", fmt.Sprintf("Git %s remote", strings.Join(parts, ", ")))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 14. Stores summary
|
||||||
|
stores, err := store.AllStores()
|
||||||
|
if err != nil {
|
||||||
|
emit("FAIL", fmt.Sprintf("Cannot list stores: %v", err))
|
||||||
|
} else if len(stores) == 0 {
|
||||||
|
emit("WARN", "No stores found")
|
||||||
|
} else {
|
||||||
|
var totalKeys, totalSecrets, parseErrors int
|
||||||
|
var totalSize int64
|
||||||
|
for _, name := range stores {
|
||||||
|
p, pErr := store.storePath(name)
|
||||||
|
if pErr != nil {
|
||||||
|
parseErrors++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if fi, sErr := os.Stat(p); sErr == nil {
|
||||||
|
totalSize += fi.Size()
|
||||||
|
}
|
||||||
|
entries, rErr := readStoreFile(p, nil)
|
||||||
|
if rErr != nil {
|
||||||
|
parseErrors++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
totalKeys += len(entries)
|
||||||
|
for _, e := range entries {
|
||||||
|
if e.Secret {
|
||||||
|
totalSecrets++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if parseErrors > 0 {
|
||||||
|
emit("FAIL", fmt.Sprintf("%d store(s), %d with parse errors", len(stores), parseErrors))
|
||||||
|
} else {
|
||||||
|
emit("ok", fmt.Sprintf("%d store(s), %d key(s), %d secret(s), %s total size",
|
||||||
|
len(stores), totalKeys, totalSecrets, formatSize(int(totalSize))))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if hasError {
|
||||||
|
emit("FAIL", "1 or more issues found")
|
||||||
|
} else {
|
||||||
|
emit("ok", "No issues found")
|
||||||
|
}
|
||||||
|
|
||||||
|
return hasError
|
||||||
|
}
|
||||||
|
|
||||||
|
func configDiffStrings(fields []ConfigField) []string {
|
||||||
|
var diffs []string
|
||||||
|
for _, f := range fields {
|
||||||
|
if !f.IsDefault {
|
||||||
|
diffs = append(diffs, fmt.Sprintf("%s: %v", f.Key, f.Value))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return diffs
|
||||||
|
}
|
||||||
|
|
||||||
|
func unexpectedFiles(dir string, allowed map[string]bool) bool {
|
||||||
|
entries, err := os.ReadDir(dir)
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for _, e := range entries {
|
||||||
|
if !allowed[e.Name()] {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func unexpectedDataFiles(dir string) bool {
|
||||||
|
entries, err := os.ReadDir(dir)
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for _, e := range entries {
|
||||||
|
name := e.Name()
|
||||||
|
if e.IsDir() && name == ".git" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if !e.IsDir() && (name == ".gitignore" || name == "identity.txt" || name == "recipients.txt" || filepath.Ext(name) == ".ndjson") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
154
cmd/doctor_test.go
Normal file
154
cmd/doctor_test.go
Normal file
|
|
@ -0,0 +1,154 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestDoctorCleanEnv(t *testing.T) {
|
||||||
|
dataDir := t.TempDir()
|
||||||
|
configDir := t.TempDir()
|
||||||
|
t.Setenv("PDA_DATA", dataDir)
|
||||||
|
t.Setenv("PDA_CONFIG", configDir)
|
||||||
|
saved := configErr
|
||||||
|
configErr = nil
|
||||||
|
t.Cleanup(func() { configErr = saved })
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
hasError := runDoctor(&buf)
|
||||||
|
out := buf.String()
|
||||||
|
|
||||||
|
if hasError {
|
||||||
|
t.Errorf("expected no errors, got hasError=true\noutput:\n%s", out)
|
||||||
|
}
|
||||||
|
for _, want := range []string{
|
||||||
|
version,
|
||||||
|
"Using default configuration",
|
||||||
|
"No identity file found",
|
||||||
|
"Git not initialised",
|
||||||
|
"No stores found",
|
||||||
|
} {
|
||||||
|
if !strings.Contains(out, want) {
|
||||||
|
t.Errorf("expected %q in output, got:\n%s", want, out)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDoctorWithStores(t *testing.T) {
|
||||||
|
dataDir := t.TempDir()
|
||||||
|
configDir := t.TempDir()
|
||||||
|
t.Setenv("PDA_DATA", dataDir)
|
||||||
|
t.Setenv("PDA_CONFIG", configDir)
|
||||||
|
saved := configErr
|
||||||
|
configErr = nil
|
||||||
|
t.Cleanup(func() { configErr = saved })
|
||||||
|
|
||||||
|
content := "{\"key\":\"foo\",\"value\":\"bar\",\"encoding\":\"text\"}\n" +
|
||||||
|
"{\"key\":\"baz\",\"value\":\"qux\",\"encoding\":\"text\"}\n"
|
||||||
|
if err := os.WriteFile(filepath.Join(dataDir, "test.ndjson"), []byte(content), 0o644); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
hasError := runDoctor(&buf)
|
||||||
|
out := buf.String()
|
||||||
|
|
||||||
|
if hasError {
|
||||||
|
t.Errorf("expected no errors, got hasError=true\noutput:\n%s", out)
|
||||||
|
}
|
||||||
|
if !strings.Contains(out, "1 store(s), 2 key(s), 0 secret(s)") {
|
||||||
|
t.Errorf("expected store summary in output, got:\n%s", out)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDoctorIdentityPermissions(t *testing.T) {
|
||||||
|
dataDir := t.TempDir()
|
||||||
|
configDir := t.TempDir()
|
||||||
|
t.Setenv("PDA_DATA", dataDir)
|
||||||
|
t.Setenv("PDA_CONFIG", configDir)
|
||||||
|
|
||||||
|
idPath := filepath.Join(dataDir, "identity.txt")
|
||||||
|
if err := os.WriteFile(idPath, []byte("placeholder\n"), 0o644); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
runDoctor(&buf)
|
||||||
|
out := buf.String()
|
||||||
|
|
||||||
|
if !strings.Contains(out, "Identity:") {
|
||||||
|
t.Errorf("expected 'Identity:' in output, got:\n%s", out)
|
||||||
|
}
|
||||||
|
if !strings.Contains(out, "should be 0600") {
|
||||||
|
t.Errorf("expected permissions warning in output, got:\n%s", out)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDoctorUndecodedKeys(t *testing.T) {
|
||||||
|
dataDir := t.TempDir()
|
||||||
|
configDir := t.TempDir()
|
||||||
|
t.Setenv("PDA_DATA", dataDir)
|
||||||
|
t.Setenv("PDA_CONFIG", configDir)
|
||||||
|
|
||||||
|
// Write a config with an unknown key.
|
||||||
|
cfgContent := "[store]\nno_such_key = true\n"
|
||||||
|
if err := os.WriteFile(filepath.Join(configDir, "config.toml"), []byte(cfgContent), 0o644); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
savedCfg, savedUndecoded, savedErr := config, configUndecodedKeys, configErr
|
||||||
|
config, configUndecodedKeys, _, configErr = loadConfig()
|
||||||
|
t.Cleanup(func() {
|
||||||
|
config, configUndecodedKeys, configErr = savedCfg, savedUndecoded, savedErr
|
||||||
|
})
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
runDoctor(&buf)
|
||||||
|
out := buf.String()
|
||||||
|
|
||||||
|
if !strings.Contains(out, "Unrecognised config key") {
|
||||||
|
t.Errorf("expected undecoded key warning, got:\n%s", out)
|
||||||
|
}
|
||||||
|
if !strings.Contains(out, "store.no_such_key") {
|
||||||
|
t.Errorf("expected 'store.no_such_key' in output, got:\n%s", out)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDoctorGitInitialised(t *testing.T) {
|
||||||
|
dataDir := t.TempDir()
|
||||||
|
configDir := t.TempDir()
|
||||||
|
t.Setenv("PDA_DATA", dataDir)
|
||||||
|
t.Setenv("PDA_CONFIG", configDir)
|
||||||
|
|
||||||
|
cmd := exec.Command("git", "init")
|
||||||
|
cmd.Dir = dataDir
|
||||||
|
if err := cmd.Run(); err != nil {
|
||||||
|
t.Skipf("git not available: %v", err)
|
||||||
|
}
|
||||||
|
cmd = exec.Command("git", "commit", "--allow-empty", "-m", "init")
|
||||||
|
cmd.Dir = dataDir
|
||||||
|
cmd.Env = append(os.Environ(),
|
||||||
|
"GIT_AUTHOR_NAME=test",
|
||||||
|
"GIT_AUTHOR_EMAIL=test@test",
|
||||||
|
"GIT_COMMITTER_NAME=test",
|
||||||
|
"GIT_COMMITTER_EMAIL=test@test",
|
||||||
|
)
|
||||||
|
if err := cmd.Run(); err != nil {
|
||||||
|
t.Fatalf("git commit: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
runDoctor(&buf)
|
||||||
|
out := buf.String()
|
||||||
|
|
||||||
|
if !strings.Contains(out, "Git initialised on") {
|
||||||
|
t.Errorf("expected 'Git initialised on' in output, got:\n%s", out)
|
||||||
|
}
|
||||||
|
if !strings.Contains(out, "No git remote configured") {
|
||||||
|
t.Errorf("expected 'No git remote configured' in output, got:\n%s", out)
|
||||||
|
}
|
||||||
|
}
|
||||||
219
cmd/dump.go
219
cmd/dump.go
|
|
@ -1,219 +0,0 @@
|
||||||
/*
|
|
||||||
Copyright © 2025 Lewis Wynne <lew@ily.rs>
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
|
||||||
all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|
||||||
THE SOFTWARE.
|
|
||||||
*/
|
|
||||||
|
|
||||||
package cmd
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/base64"
|
|
||||||
"encoding/json"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"strings"
|
|
||||||
"unicode/utf8"
|
|
||||||
|
|
||||||
"github.com/dgraph-io/badger/v4"
|
|
||||||
"github.com/gobwas/glob"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
|
||||||
|
|
||||||
type dumpEntry struct {
|
|
||||||
Key string `json:"key"`
|
|
||||||
Value string `json:"value"`
|
|
||||||
Encoding string `json:"encoding,omitempty"`
|
|
||||||
Secret bool `json:"secret,omitempty"`
|
|
||||||
ExpiresAt *int64 `json:"expires_at,omitempty"`
|
|
||||||
}
|
|
||||||
|
|
||||||
var dumpCmd = &cobra.Command{
|
|
||||||
Use: "dump [DB]",
|
|
||||||
Short: "Dump all key/value pairs as NDJSON",
|
|
||||||
Aliases: []string{"export"},
|
|
||||||
Args: cobra.MaximumNArgs(1),
|
|
||||||
RunE: dump,
|
|
||||||
SilenceUsage: true,
|
|
||||||
}
|
|
||||||
|
|
||||||
func dump(cmd *cobra.Command, args []string) error {
|
|
||||||
store := &Store{}
|
|
||||||
targetDB := "@" + config.Store.DefaultStoreName
|
|
||||||
if len(args) == 1 {
|
|
||||||
rawArg := args[0]
|
|
||||||
dbName, err := store.parseDB(rawArg, false)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("cannot dump '%s': %v", rawArg, err)
|
|
||||||
}
|
|
||||||
if _, err := store.FindStore(dbName); err != nil {
|
|
||||||
var notFound errNotFound
|
|
||||||
if errors.As(err, ¬Found) {
|
|
||||||
return fmt.Errorf("cannot dump '%s': %v", rawArg, err)
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
targetDB = "@" + dbName
|
|
||||||
}
|
|
||||||
|
|
||||||
mode, err := cmd.Flags().GetString("encoding")
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("cannot dump '%s': %v", targetDB, err)
|
|
||||||
}
|
|
||||||
switch mode {
|
|
||||||
case "auto", "base64", "text":
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("cannot dump '%s': unsupported encoding '%s'", targetDB, mode)
|
|
||||||
}
|
|
||||||
|
|
||||||
includeSecret, err := cmd.Flags().GetBool("secret")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
globPatterns, err := cmd.Flags().GetStringSlice("glob")
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("cannot dump '%s': %v", targetDB, err)
|
|
||||||
}
|
|
||||||
separators, err := parseGlobSeparators(cmd)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("cannot dump '%s': %v", targetDB, err)
|
|
||||||
}
|
|
||||||
matchers, err := compileGlobMatchers(globPatterns, separators)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("cannot dump '%s': %v", targetDB, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
opts := DumpOptions{
|
|
||||||
Encoding: mode,
|
|
||||||
IncludeSecret: includeSecret,
|
|
||||||
Matchers: matchers,
|
|
||||||
GlobPatterns: globPatterns,
|
|
||||||
}
|
|
||||||
return dumpDatabase(store, strings.TrimPrefix(targetDB, "@"), cmd.OutOrStdout(), opts)
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
dumpCmd.Flags().StringP("encoding", "e", "auto", "value encoding: auto, base64, or text")
|
|
||||||
dumpCmd.Flags().Bool("secret", false, "Include entries marked as secret")
|
|
||||||
dumpCmd.Flags().StringSliceP("glob", "g", nil, "Filter keys with glob pattern (repeatable)")
|
|
||||||
dumpCmd.Flags().String("glob-sep", "", fmt.Sprintf("Characters treated as separators for globbing (default %q)", defaultGlobSeparatorsDisplay()))
|
|
||||||
rootCmd.AddCommand(dumpCmd)
|
|
||||||
}
|
|
||||||
|
|
||||||
func encodeBase64(entry *dumpEntry, v []byte) {
|
|
||||||
entry.Value = base64.StdEncoding.EncodeToString(v)
|
|
||||||
entry.Encoding = "base64"
|
|
||||||
}
|
|
||||||
|
|
||||||
func encodeText(entry *dumpEntry, key []byte, v []byte) error {
|
|
||||||
if !utf8.Valid(v) {
|
|
||||||
return fmt.Errorf("key %q contains non-UTF8 data; use --encoding=auto or base64", key)
|
|
||||||
}
|
|
||||||
entry.Value = string(v)
|
|
||||||
entry.Encoding = "text"
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// DumpOptions controls how a database is dumped to NDJSON.
|
|
||||||
type DumpOptions struct {
|
|
||||||
Encoding string
|
|
||||||
IncludeSecret bool
|
|
||||||
Matchers []glob.Glob
|
|
||||||
GlobPatterns []string
|
|
||||||
}
|
|
||||||
|
|
||||||
// dumpDatabase writes entries from dbName to w as NDJSON.
|
|
||||||
func dumpDatabase(store *Store, dbName string, w io.Writer, opts DumpOptions) error {
|
|
||||||
targetDB := "@" + dbName
|
|
||||||
if opts.Encoding == "" {
|
|
||||||
opts.Encoding = "auto"
|
|
||||||
}
|
|
||||||
|
|
||||||
var matched bool
|
|
||||||
trans := TransactionArgs{
|
|
||||||
key: targetDB,
|
|
||||||
readonly: true,
|
|
||||||
sync: true,
|
|
||||||
transact: func(tx *badger.Txn, k []byte) error {
|
|
||||||
it := tx.NewIterator(badger.DefaultIteratorOptions)
|
|
||||||
defer it.Close()
|
|
||||||
for it.Rewind(); it.Valid(); it.Next() {
|
|
||||||
item := it.Item()
|
|
||||||
key := item.KeyCopy(nil)
|
|
||||||
if !globMatch(opts.Matchers, string(key)) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
meta := item.UserMeta()
|
|
||||||
isSecret := meta&metaSecret != 0
|
|
||||||
if isSecret && !opts.IncludeSecret {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
expiresAt := item.ExpiresAt()
|
|
||||||
if err := item.Value(func(v []byte) error {
|
|
||||||
entry := dumpEntry{
|
|
||||||
Key: string(key),
|
|
||||||
Secret: isSecret,
|
|
||||||
}
|
|
||||||
if expiresAt > 0 {
|
|
||||||
ts := int64(expiresAt)
|
|
||||||
entry.ExpiresAt = &ts
|
|
||||||
}
|
|
||||||
switch opts.Encoding {
|
|
||||||
case "base64":
|
|
||||||
encodeBase64(&entry, v)
|
|
||||||
case "text":
|
|
||||||
if err := encodeText(&entry, key, v); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
case "auto":
|
|
||||||
if utf8.Valid(v) {
|
|
||||||
entry.Encoding = "text"
|
|
||||||
entry.Value = string(v)
|
|
||||||
} else {
|
|
||||||
encodeBase64(&entry, v)
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("unsupported encoding '%s'", opts.Encoding)
|
|
||||||
}
|
|
||||||
payload, err := json.Marshal(entry)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
_, err = fmt.Fprintln(w, string(payload))
|
|
||||||
if err == nil {
|
|
||||||
matched = true
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := store.Transaction(trans); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(opts.Matchers) > 0 && !matched {
|
|
||||||
return fmt.Errorf("No matches for pattern %s", formatGlobPatterns(opts.GlobPatterns))
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
258
cmd/edit.go
Normal file
258
cmd/edit.go
Normal file
|
|
@ -0,0 +1,258 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/base64"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"unicode/utf8"
|
||||||
|
|
||||||
|
"filippo.io/age"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
var editCmd = &cobra.Command{
|
||||||
|
Use: "edit KEY[@STORE]",
|
||||||
|
Short: "Edit a key's value in $EDITOR",
|
||||||
|
Long: `Open a key's value in $EDITOR. If the key doesn't exist, opens an
|
||||||
|
empty file — saving non-empty content creates the key.
|
||||||
|
|
||||||
|
Binary values are presented as base64 for editing and decoded back on save.
|
||||||
|
|
||||||
|
Metadata flags (--ttl, --encrypt, --decrypt) can be passed alongside the edit
|
||||||
|
to modify metadata in the same operation.`,
|
||||||
|
Aliases: []string{"e"},
|
||||||
|
Args: cobra.ExactArgs(1),
|
||||||
|
ValidArgsFunction: completeKeys,
|
||||||
|
RunE: edit,
|
||||||
|
SilenceUsage: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
func edit(cmd *cobra.Command, args []string) error {
|
||||||
|
editor := os.Getenv("EDITOR")
|
||||||
|
if editor == "" {
|
||||||
|
return withHint(
|
||||||
|
fmt.Errorf("EDITOR not set"),
|
||||||
|
"set $EDITOR to your preferred text editor",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
store := &Store{}
|
||||||
|
|
||||||
|
spec, err := store.parseKey(args[0], true)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot edit '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
|
||||||
|
ttlStr, _ := cmd.Flags().GetString("ttl")
|
||||||
|
encryptFlag, _ := cmd.Flags().GetBool("encrypt")
|
||||||
|
decryptFlag, _ := cmd.Flags().GetBool("decrypt")
|
||||||
|
preserveNewline, _ := cmd.Flags().GetBool("preserve-newline")
|
||||||
|
force, _ := cmd.Flags().GetBool("force")
|
||||||
|
readonlyFlag, _ := cmd.Flags().GetBool("readonly")
|
||||||
|
writableFlag, _ := cmd.Flags().GetBool("writable")
|
||||||
|
pinFlag, _ := cmd.Flags().GetBool("pin")
|
||||||
|
unpinFlag, _ := cmd.Flags().GetBool("unpin")
|
||||||
|
|
||||||
|
if encryptFlag && decryptFlag {
|
||||||
|
return fmt.Errorf("cannot edit '%s': --encrypt and --decrypt are mutually exclusive", args[0])
|
||||||
|
}
|
||||||
|
if readonlyFlag && writableFlag {
|
||||||
|
return fmt.Errorf("cannot edit '%s': --readonly and --writable are mutually exclusive", args[0])
|
||||||
|
}
|
||||||
|
if pinFlag && unpinFlag {
|
||||||
|
return fmt.Errorf("cannot edit '%s': --pin and --unpin are mutually exclusive", args[0])
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load identity
|
||||||
|
var identity *age.X25519Identity
|
||||||
|
if encryptFlag {
|
||||||
|
identity, err = ensureIdentity()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot edit '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
identity, _ = loadIdentity()
|
||||||
|
}
|
||||||
|
recipients, err := allRecipients(identity)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot edit '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
|
||||||
|
p, err := store.storePath(spec.DB)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot edit '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
entries, err := readStoreFile(p, identity)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot edit '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
idx := findEntry(entries, spec.Key)
|
||||||
|
|
||||||
|
creating := idx < 0
|
||||||
|
var original []byte
|
||||||
|
var wasBinary bool
|
||||||
|
var entry *Entry
|
||||||
|
|
||||||
|
if creating {
|
||||||
|
original = nil
|
||||||
|
} else {
|
||||||
|
entry = &entries[idx]
|
||||||
|
if entry.ReadOnly && !force {
|
||||||
|
return fmt.Errorf("cannot edit '%s': key is read-only", args[0])
|
||||||
|
}
|
||||||
|
if entry.Locked {
|
||||||
|
return fmt.Errorf("cannot edit '%s': secret is locked (identity file missing)", args[0])
|
||||||
|
}
|
||||||
|
original = entry.Value
|
||||||
|
wasBinary = !utf8.Valid(original)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prepare temp file content
|
||||||
|
var tmpContent []byte
|
||||||
|
if wasBinary {
|
||||||
|
tmpContent = []byte(base64.StdEncoding.EncodeToString(original))
|
||||||
|
} else {
|
||||||
|
tmpContent = original
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write to temp file
|
||||||
|
tmpFile, err := os.CreateTemp("", "pda-edit-*")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot edit '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
tmpPath := tmpFile.Name()
|
||||||
|
defer os.Remove(tmpPath)
|
||||||
|
|
||||||
|
if _, err := tmpFile.Write(tmpContent); err != nil {
|
||||||
|
tmpFile.Close()
|
||||||
|
return fmt.Errorf("cannot edit '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
if err := tmpFile.Close(); err != nil {
|
||||||
|
return fmt.Errorf("cannot edit '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Launch editor
|
||||||
|
c := exec.Command(editor, tmpPath)
|
||||||
|
c.Stdin = os.Stdin
|
||||||
|
c.Stdout = os.Stdout
|
||||||
|
c.Stderr = os.Stderr
|
||||||
|
if err := c.Run(); err != nil {
|
||||||
|
return fmt.Errorf("cannot edit '%s': editor failed: %v", args[0], err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read back
|
||||||
|
edited, err := os.ReadFile(tmpPath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot edit '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode base64 if original was binary; strip trailing newlines for text
|
||||||
|
// unless --preserve-newline is set
|
||||||
|
var newValue []byte
|
||||||
|
if wasBinary {
|
||||||
|
decoded, err := base64.StdEncoding.DecodeString(string(bytes.TrimSpace(edited)))
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot edit '%s': invalid base64: %v", args[0], err)
|
||||||
|
}
|
||||||
|
newValue = decoded
|
||||||
|
} else if preserveNewline {
|
||||||
|
newValue = edited
|
||||||
|
} else {
|
||||||
|
newValue = bytes.TrimRight(edited, "\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for no-op
|
||||||
|
noMetaFlags := ttlStr == "" && !encryptFlag && !decryptFlag && !readonlyFlag && !writableFlag && !pinFlag && !unpinFlag
|
||||||
|
if bytes.Equal(original, newValue) && noMetaFlags {
|
||||||
|
infof("no changes to '%s'", spec.Display())
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Creating: empty save means abort
|
||||||
|
if creating && len(newValue) == 0 && noMetaFlags {
|
||||||
|
infof("empty value, nothing saved")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build or update entry
|
||||||
|
if creating {
|
||||||
|
newEntry := Entry{
|
||||||
|
Key: spec.Key,
|
||||||
|
Value: newValue,
|
||||||
|
Secret: encryptFlag,
|
||||||
|
ReadOnly: readonlyFlag,
|
||||||
|
Pinned: pinFlag,
|
||||||
|
}
|
||||||
|
if ttlStr != "" {
|
||||||
|
expiresAt, err := parseTTLString(ttlStr)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot edit '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
newEntry.ExpiresAt = expiresAt
|
||||||
|
}
|
||||||
|
entries = append(entries, newEntry)
|
||||||
|
} else {
|
||||||
|
entry.Value = newValue
|
||||||
|
|
||||||
|
if ttlStr != "" {
|
||||||
|
expiresAt, err := parseTTLString(ttlStr)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot edit '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
entry.ExpiresAt = expiresAt
|
||||||
|
}
|
||||||
|
|
||||||
|
if encryptFlag {
|
||||||
|
if entry.Secret {
|
||||||
|
return fmt.Errorf("cannot edit '%s': already encrypted", args[0])
|
||||||
|
}
|
||||||
|
entry.Secret = true
|
||||||
|
}
|
||||||
|
if decryptFlag {
|
||||||
|
if !entry.Secret {
|
||||||
|
return fmt.Errorf("cannot edit '%s': not encrypted", args[0])
|
||||||
|
}
|
||||||
|
entry.Secret = false
|
||||||
|
}
|
||||||
|
|
||||||
|
if readonlyFlag {
|
||||||
|
entry.ReadOnly = true
|
||||||
|
}
|
||||||
|
if writableFlag {
|
||||||
|
entry.ReadOnly = false
|
||||||
|
}
|
||||||
|
if pinFlag {
|
||||||
|
entry.Pinned = true
|
||||||
|
}
|
||||||
|
if unpinFlag {
|
||||||
|
entry.Pinned = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := writeStoreFile(p, entries, recipients); err != nil {
|
||||||
|
return fmt.Errorf("cannot edit '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if creating {
|
||||||
|
okf("created '%s'", spec.Display())
|
||||||
|
} else {
|
||||||
|
okf("updated '%s'", spec.Display())
|
||||||
|
}
|
||||||
|
|
||||||
|
return autoSync("edit " + spec.Display())
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
editCmd.Flags().String("ttl", "", "set expiry (e.g. 30m, 2h) or 'never' to clear")
|
||||||
|
editCmd.Flags().BoolP("encrypt", "e", false, "encrypt the value at rest")
|
||||||
|
editCmd.Flags().BoolP("decrypt", "d", false, "decrypt the value (store as plaintext)")
|
||||||
|
editCmd.Flags().Bool("preserve-newline", false, "keep trailing newlines added by the editor")
|
||||||
|
editCmd.Flags().Bool("force", false, "bypass read-only protection")
|
||||||
|
editCmd.Flags().Bool("readonly", false, "mark the key as read-only")
|
||||||
|
editCmd.Flags().Bool("writable", false, "clear the read-only flag")
|
||||||
|
editCmd.Flags().Bool("pin", false, "pin the key (sorts to top in list)")
|
||||||
|
editCmd.Flags().Bool("unpin", false, "unpin the key")
|
||||||
|
rootCmd.AddCommand(editCmd)
|
||||||
|
}
|
||||||
113
cmd/edit_test.go
Normal file
113
cmd/edit_test.go
Normal file
|
|
@ -0,0 +1,113 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"filippo.io/age"
|
||||||
|
)
|
||||||
|
|
||||||
|
func setupEditTest(t *testing.T) (*age.X25519Identity, string) {
|
||||||
|
t.Helper()
|
||||||
|
dataDir := t.TempDir()
|
||||||
|
configDir := t.TempDir()
|
||||||
|
t.Setenv("PDA_DATA", dataDir)
|
||||||
|
t.Setenv("PDA_CONFIG", configDir)
|
||||||
|
|
||||||
|
id, err := age.GenerateX25519Identity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if err := os.WriteFile(filepath.Join(dataDir, "identity.txt"), []byte(id.String()+"\n"), 0o600); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reset global config to defaults with test env vars active
|
||||||
|
config, _, _, _ = loadConfig()
|
||||||
|
|
||||||
|
return id, dataDir
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEditCreatesNewKey(t *testing.T) {
|
||||||
|
id, _ := setupEditTest(t)
|
||||||
|
|
||||||
|
// Create editor script that writes "hello"
|
||||||
|
script := filepath.Join(t.TempDir(), "editor.sh")
|
||||||
|
if err := os.WriteFile(script, []byte("#!/bin/sh\necho hello > \"$1\"\n"), 0o755); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
t.Setenv("EDITOR", script)
|
||||||
|
|
||||||
|
// Run edit for a new key
|
||||||
|
rootCmd.SetArgs([]string{"edit", "newkey@testedit"})
|
||||||
|
if err := rootCmd.Execute(); err != nil {
|
||||||
|
t.Fatalf("edit failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify key was created
|
||||||
|
store := &Store{}
|
||||||
|
p, _ := store.storePath("testedit")
|
||||||
|
entries, _ := readStoreFile(p, id)
|
||||||
|
idx := findEntry(entries, "newkey")
|
||||||
|
if idx < 0 {
|
||||||
|
t.Fatal("key was not created")
|
||||||
|
}
|
||||||
|
if string(entries[idx].Value) != "hello" {
|
||||||
|
t.Fatalf("unexpected value: %q", entries[idx].Value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEditModifiesExistingKey(t *testing.T) {
|
||||||
|
id, _ := setupEditTest(t)
|
||||||
|
|
||||||
|
// Create an existing key
|
||||||
|
store := &Store{}
|
||||||
|
p, _ := store.storePath("testedit2")
|
||||||
|
entries := []Entry{{Key: "existing", Value: []byte("original")}}
|
||||||
|
if err := writeStoreFile(p, entries, nil); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Editor that replaces content
|
||||||
|
script := filepath.Join(t.TempDir(), "editor.sh")
|
||||||
|
if err := os.WriteFile(script, []byte("#!/bin/sh\necho modified > \"$1\"\n"), 0o755); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
t.Setenv("EDITOR", script)
|
||||||
|
|
||||||
|
rootCmd.SetArgs([]string{"edit", "existing@testedit2"})
|
||||||
|
if err := rootCmd.Execute(); err != nil {
|
||||||
|
t.Fatalf("edit failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify
|
||||||
|
entries, _ = readStoreFile(p, id)
|
||||||
|
idx := findEntry(entries, "existing")
|
||||||
|
if idx < 0 {
|
||||||
|
t.Fatal("key disappeared")
|
||||||
|
}
|
||||||
|
if string(entries[idx].Value) != "modified" {
|
||||||
|
t.Fatalf("unexpected value: %q", entries[idx].Value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEditNoChangeSkipsWrite(t *testing.T) {
|
||||||
|
setupEditTest(t)
|
||||||
|
|
||||||
|
store := &Store{}
|
||||||
|
p, _ := store.storePath("testedit3")
|
||||||
|
entries := []Entry{{Key: "unchanged", Value: []byte("same")}}
|
||||||
|
if err := writeStoreFile(p, entries, nil); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// "true" command does nothing — file stays the same
|
||||||
|
t.Setenv("EDITOR", "true")
|
||||||
|
|
||||||
|
rootCmd.SetArgs([]string{"edit", "unchanged@testedit3"})
|
||||||
|
if err := rootCmd.Execute(); err != nil {
|
||||||
|
t.Fatalf("edit failed: %v", err)
|
||||||
|
}
|
||||||
|
// Should print "no changes" — we just verify it didn't error
|
||||||
|
}
|
||||||
48
cmd/export.go
Normal file
48
cmd/export.go
Normal file
|
|
@ -0,0 +1,48 @@
|
||||||
|
/*
|
||||||
|
Copyright © 2025 Lewis Wynne <lew@ily.rs>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
var exportCmd = &cobra.Command{
|
||||||
|
Use: "export [STORE]",
|
||||||
|
Short: "Export store as NDJSON (alias for list --format ndjson)",
|
||||||
|
Aliases: []string{},
|
||||||
|
Args: cobra.MaximumNArgs(1),
|
||||||
|
ValidArgsFunction: completeStores,
|
||||||
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
|
listFormat = "ndjson"
|
||||||
|
return list(cmd, args)
|
||||||
|
},
|
||||||
|
SilenceUsage: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
exportCmd.Flags().StringSliceP("key", "k", nil, "filter keys with glob pattern (repeatable)")
|
||||||
|
exportCmd.Flags().StringSliceP("store", "s", nil, "filter stores with glob pattern (repeatable)")
|
||||||
|
exportCmd.RegisterFlagCompletionFunc("store", completeStoreFlag)
|
||||||
|
exportCmd.Flags().StringSliceP("value", "v", nil, "filter values with glob pattern (repeatable)")
|
||||||
|
rootCmd.AddCommand(exportCmd)
|
||||||
|
}
|
||||||
174
cmd/get.go
174
cmd/get.go
|
|
@ -27,20 +27,18 @@ import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
"os"
|
||||||
"os/exec"
|
"os/exec"
|
||||||
"slices"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
"strings"
|
||||||
"text/template"
|
"text/template"
|
||||||
|
"unicode/utf8"
|
||||||
|
|
||||||
"github.com/dgraph-io/badger/v4"
|
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
// getCmd represents the get command
|
// getCmd represents the get command
|
||||||
var getCmd = &cobra.Command{
|
var getCmd = &cobra.Command{
|
||||||
Use: "get KEY[@DB]",
|
Use: "get KEY[@STORE]",
|
||||||
Short: "Get a value for a key. Optionally specify a db.",
|
Short: "Get the value of a key",
|
||||||
Long: `Get a value for a key. Optionally specify a db.
|
Long: `Get the value of a key. Optionally specify a store.
|
||||||
|
|
||||||
{{ .TEMPLATES }} can be filled by passing TEMPLATE=VALUE as an
|
{{ .TEMPLATES }} can be filled by passing TEMPLATE=VALUE as an
|
||||||
additional argument after the initial KEY being fetched.
|
additional argument after the initial KEY being fetched.
|
||||||
|
|
@ -48,50 +46,71 @@ additional argument after the initial KEY being fetched.
|
||||||
For example:
|
For example:
|
||||||
pda set greeting 'Hello, {{ .NAME }}!'
|
pda set greeting 'Hello, {{ .NAME }}!'
|
||||||
pda get greeting NAME=World`,
|
pda get greeting NAME=World`,
|
||||||
Aliases: []string{"g"},
|
Aliases: []string{"g"},
|
||||||
Args: cobra.MinimumNArgs(1),
|
Args: cobra.MinimumNArgs(1),
|
||||||
RunE: get,
|
ValidArgsFunction: completeKeys,
|
||||||
|
RunE: get,
|
||||||
|
SilenceUsage: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
var runCmd = &cobra.Command{
|
||||||
|
Use: "run KEY[@STORE]",
|
||||||
|
Short: "Get the value of a key and execute it",
|
||||||
|
Long: `Get the value of a key and execute it as a shell command. Optionally specify a store.
|
||||||
|
|
||||||
|
{{ .TEMPLATES }} can be filled by passing TEMPLATE=VALUE as an
|
||||||
|
additional argument after the initial KEY being fetched.
|
||||||
|
|
||||||
|
For example:
|
||||||
|
pda set greeting 'Hello, {{ .NAME }}!'
|
||||||
|
pda run greeting NAME=World`,
|
||||||
|
Args: cobra.MinimumNArgs(1),
|
||||||
|
ValidArgsFunction: completeKeys,
|
||||||
|
RunE: run,
|
||||||
SilenceUsage: true,
|
SilenceUsage: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
func get(cmd *cobra.Command, args []string) error {
|
func get(cmd *cobra.Command, args []string) error {
|
||||||
store := &Store{}
|
store := &Store{}
|
||||||
|
|
||||||
var v []byte
|
identity, _ := loadIdentity()
|
||||||
var meta byte
|
|
||||||
trans := TransactionArgs{
|
|
||||||
key: args[0],
|
|
||||||
readonly: true,
|
|
||||||
sync: false,
|
|
||||||
transact: func(tx *badger.Txn, k []byte) error {
|
|
||||||
item, err := tx.Get(k)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
meta = item.UserMeta()
|
|
||||||
v, err = item.ValueCopy(nil)
|
|
||||||
return err
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := store.Transaction(trans); err != nil {
|
spec, err := store.parseKey(args[0], true)
|
||||||
return fmt.Errorf("cannot get '%s': %v", args[0], err)
|
|
||||||
}
|
|
||||||
|
|
||||||
includeSecret, err := cmd.Flags().GetBool("secret")
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot get '%s': %v", args[0], err)
|
return fmt.Errorf("cannot get '%s': %v", args[0], err)
|
||||||
}
|
}
|
||||||
if meta&metaSecret != 0 && !includeSecret {
|
p, err := store.storePath(spec.DB)
|
||||||
return fmt.Errorf("cannot get '%s': marked as secret, run with --secret", args[0])
|
|
||||||
}
|
|
||||||
|
|
||||||
binary, err := cmd.Flags().GetBool("include-binary")
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot get '%s': %v", args[0], err)
|
return fmt.Errorf("cannot get '%s': %v", args[0], err)
|
||||||
}
|
}
|
||||||
|
entries, err := readStoreFile(p, identity)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot get '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
idx := findEntry(entries, spec.Key)
|
||||||
|
|
||||||
run, err := cmd.Flags().GetBool("run")
|
existsOnly, _ := cmd.Flags().GetBool("exists")
|
||||||
|
if existsOnly {
|
||||||
|
if idx < 0 {
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if idx < 0 {
|
||||||
|
keys := make([]string, len(entries))
|
||||||
|
for i, e := range entries {
|
||||||
|
keys[i] = e.Key
|
||||||
|
}
|
||||||
|
return fmt.Errorf("cannot get '%s': %w", args[0], suggestKey(spec.Key, keys))
|
||||||
|
}
|
||||||
|
entry := entries[idx]
|
||||||
|
if entry.Locked {
|
||||||
|
return fmt.Errorf("cannot get '%s': secret is locked (identity file missing)", spec.Display())
|
||||||
|
}
|
||||||
|
v := entry.Value
|
||||||
|
|
||||||
|
binary, err := cmd.Flags().GetBool("base64")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot get '%s': %v", args[0], err)
|
return fmt.Errorf("cannot get '%s': %v", args[0], err)
|
||||||
}
|
}
|
||||||
|
|
@ -101,7 +120,7 @@ func get(cmd *cobra.Command, args []string) error {
|
||||||
return fmt.Errorf("cannot get '%s': %v", args[0], err)
|
return fmt.Errorf("cannot get '%s': %v", args[0], err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if !noTemplate {
|
if !noTemplate && utf8.Valid(v) {
|
||||||
var substitutions []string
|
var substitutions []string
|
||||||
if len(args) > 1 {
|
if len(args) > 1 {
|
||||||
substitutions = args[1:]
|
substitutions = args[1:]
|
||||||
|
|
@ -112,8 +131,8 @@ func get(cmd *cobra.Command, args []string) error {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if run {
|
if runFlag {
|
||||||
return runCmd(string(v))
|
return runShellCommand(string(v))
|
||||||
}
|
}
|
||||||
|
|
||||||
store.Print("%s", binary, v)
|
store.Print("%s", binary, v)
|
||||||
|
|
@ -125,58 +144,16 @@ func applyTemplate(tplBytes []byte, substitutions []string) ([]byte, error) {
|
||||||
for _, s := range substitutions {
|
for _, s := range substitutions {
|
||||||
parts := strings.SplitN(s, "=", 2)
|
parts := strings.SplitN(s, "=", 2)
|
||||||
if len(parts) != 2 || parts[0] == "" {
|
if len(parts) != 2 || parts[0] == "" {
|
||||||
fmt.Fprintf(os.Stderr, "invalid substitutions %q (expected KEY=VALUE)\n", s)
|
warnf("invalid substitution '%s', expected KEY=VALUE", s)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
key := parts[0]
|
key := parts[0]
|
||||||
val := parts[1]
|
val := parts[1]
|
||||||
vars[key] = val
|
vars[key] = val
|
||||||
}
|
}
|
||||||
funcMap := template.FuncMap{
|
funcMap := templateFuncMap()
|
||||||
"require": func(v any) (string, error) {
|
funcMap["pda"] = func(key string) (string, error) {
|
||||||
s := fmt.Sprint(v)
|
return pdaGet(key, substitutions)
|
||||||
if s == "" {
|
|
||||||
return "", fmt.Errorf("required value is missing or empty")
|
|
||||||
}
|
|
||||||
return s, nil
|
|
||||||
},
|
|
||||||
"default": func(def string, v any) string {
|
|
||||||
s := fmt.Sprint(v)
|
|
||||||
if s == "" {
|
|
||||||
return def
|
|
||||||
}
|
|
||||||
return s
|
|
||||||
},
|
|
||||||
"env": os.Getenv,
|
|
||||||
"enum": func(v any, allowed ...string) (string, error) {
|
|
||||||
s := fmt.Sprint(v)
|
|
||||||
if s == "" {
|
|
||||||
return "", fmt.Errorf("enum value is missing or empty")
|
|
||||||
}
|
|
||||||
if slices.Contains(allowed, s) {
|
|
||||||
return s, nil
|
|
||||||
}
|
|
||||||
return "", fmt.Errorf("invalid value %q (allowed: %v)", s, allowed)
|
|
||||||
},
|
|
||||||
"int": func(v any) (int, error) {
|
|
||||||
s := fmt.Sprint(v)
|
|
||||||
i, err := strconv.Atoi(s)
|
|
||||||
if err != nil {
|
|
||||||
return 0, fmt.Errorf("failed to convert to int: %w", err)
|
|
||||||
}
|
|
||||||
return i, nil
|
|
||||||
},
|
|
||||||
"list": func(v any) []string {
|
|
||||||
s := fmt.Sprint(v)
|
|
||||||
if s == "" {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
parts := strings.Split(s, ",")
|
|
||||||
for i := range parts {
|
|
||||||
parts[i] = strings.TrimSpace(parts[i])
|
|
||||||
}
|
|
||||||
return parts
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
tpl, err := template.New("cmd").
|
tpl, err := template.New("cmd").
|
||||||
Delims("{{", "}}").
|
Delims("{{", "}}").
|
||||||
|
|
@ -189,12 +166,12 @@ func applyTemplate(tplBytes []byte, substitutions []string) ([]byte, error) {
|
||||||
}
|
}
|
||||||
var buf bytes.Buffer
|
var buf bytes.Buffer
|
||||||
if err := tpl.Execute(&buf, vars); err != nil {
|
if err := tpl.Execute(&buf, vars); err != nil {
|
||||||
return nil, err
|
return nil, cleanTemplateError(err)
|
||||||
}
|
}
|
||||||
return buf.Bytes(), nil
|
return buf.Bytes(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func runCmd(command string) error {
|
func runShellCommand(command string) error {
|
||||||
shell := os.Getenv("SHELL")
|
shell := os.Getenv("SHELL")
|
||||||
if shell == "" {
|
if shell == "" {
|
||||||
shell = "/bin/sh"
|
shell = "/bin/sh"
|
||||||
|
|
@ -218,10 +195,21 @@ func runCmd(command string) error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func run(cmd *cobra.Command, args []string) error {
|
||||||
getCmd.Flags().BoolP("include-binary", "b", false, "include binary data in text output")
|
runFlag = true
|
||||||
getCmd.Flags().Bool("secret", false, "display values marked as secret")
|
return get(cmd, args)
|
||||||
getCmd.Flags().BoolP("run", "c", false, "execute the result as a shell command")
|
}
|
||||||
getCmd.Flags().Bool("no-template", false, "directly output template syntax")
|
|
||||||
rootCmd.AddCommand(getCmd)
|
var runFlag bool
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
getCmd.Flags().BoolP("base64", "b", false, "view binary data as base64")
|
||||||
|
getCmd.Flags().BoolVarP(&runFlag, "run", "c", false, "execute the result as a shell command")
|
||||||
|
getCmd.Flags().Bool("no-template", false, "directly output template syntax")
|
||||||
|
getCmd.Flags().Bool("exists", false, "exit 0 if the key exists, exit 1 if not (no output)")
|
||||||
|
rootCmd.AddCommand(getCmd)
|
||||||
|
|
||||||
|
runCmd.Flags().BoolP("base64", "b", false, "view binary data as base64")
|
||||||
|
runCmd.Flags().Bool("no-template", false, "directly output template syntax")
|
||||||
|
rootCmd.AddCommand(runCmd)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
64
cmd/git.go
Normal file
64
cmd/git.go
Normal file
|
|
@ -0,0 +1,64 @@
|
||||||
|
/*
|
||||||
|
Copyright © 2025 Lewis Wynne <lew@ily.rs>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
var gitCmd = &cobra.Command{
|
||||||
|
Use: "git [args...]",
|
||||||
|
Short: "Run any arbitrary command. Use with caution.",
|
||||||
|
Long: `Run any arbitrary command. Use with caution.
|
||||||
|
|
||||||
|
The Git repository lives directly in the data directory
|
||||||
|
("PDA_DATA"). Store files (*.ndjson) are tracked by Git as-is.
|
||||||
|
|
||||||
|
If you manually modify files without using the built-in
|
||||||
|
commands, you may desync your repository.
|
||||||
|
|
||||||
|
Generally prefer "pda sync".`,
|
||||||
|
Args: cobra.ArbitraryArgs,
|
||||||
|
DisableFlagParsing: true,
|
||||||
|
SilenceUsage: true,
|
||||||
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
|
repoDir, err := ensureVCSInitialized()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
gitCmd := exec.Command("git", args...)
|
||||||
|
gitCmd.Dir = repoDir
|
||||||
|
gitCmd.Stdin = os.Stdin
|
||||||
|
gitCmd.Stdout = os.Stdout
|
||||||
|
gitCmd.Stderr = os.Stderr
|
||||||
|
return gitCmd.Run()
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
rootCmd.AddCommand(gitCmd)
|
||||||
|
}
|
||||||
24
cmd/glob.go
24
cmd/glob.go
|
|
@ -27,34 +27,14 @@ import (
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/gobwas/glob"
|
"github.com/gobwas/glob"
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var defaultGlobSeparators = []rune{'/', '-', '_', '.', '@', ':', ' '}
|
var defaultGlobSeparators = []rune{'/', '-', '_', '.', '@', ':', ' '}
|
||||||
|
|
||||||
func defaultGlobSeparatorsDisplay() string {
|
func compileGlobMatchers(patterns []string) ([]glob.Glob, error) {
|
||||||
var b strings.Builder
|
|
||||||
for _, r := range defaultGlobSeparators {
|
|
||||||
b.WriteRune(r)
|
|
||||||
}
|
|
||||||
return b.String()
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseGlobSeparators(cmd *cobra.Command) ([]rune, error) {
|
|
||||||
sepStr, err := cmd.Flags().GetString("glob-sep")
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if sepStr == "" {
|
|
||||||
return defaultGlobSeparators, nil
|
|
||||||
}
|
|
||||||
return []rune(sepStr), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func compileGlobMatchers(patterns []string, separators []rune) ([]glob.Glob, error) {
|
|
||||||
var matchers []glob.Glob
|
var matchers []glob.Glob
|
||||||
for _, pattern := range patterns {
|
for _, pattern := range patterns {
|
||||||
m, err := glob.Compile(strings.ToLower(pattern), separators...)
|
m, err := glob.Compile(strings.ToLower(pattern), defaultGlobSeparators...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
|
||||||
214
cmd/identity.go
Normal file
214
cmd/identity.go
Normal file
|
|
@ -0,0 +1,214 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"filippo.io/age"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
var identityCmd = &cobra.Command{
|
||||||
|
Use: "identity",
|
||||||
|
Aliases: []string{"id"},
|
||||||
|
Short: "Show or create the age encryption identity",
|
||||||
|
Args: cobra.NoArgs,
|
||||||
|
RunE: identityRun,
|
||||||
|
SilenceUsage: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
func identityRun(cmd *cobra.Command, args []string) error {
|
||||||
|
showPath, err := cmd.Flags().GetBool("path")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
createNew, err := cmd.Flags().GetBool("new")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
addRecipient, err := cmd.Flags().GetString("add-recipient")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
removeRecipient, err := cmd.Flags().GetString("remove-recipient")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if createNew {
|
||||||
|
existing, err := loadIdentity()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot create identity: %v", err)
|
||||||
|
}
|
||||||
|
if existing != nil {
|
||||||
|
path, _ := identityPath()
|
||||||
|
return withHint(
|
||||||
|
fmt.Errorf("identity already exists at %s", path),
|
||||||
|
"delete the file manually before creating a new one",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
id, err := ensureIdentity()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot create identity: %v", err)
|
||||||
|
}
|
||||||
|
okf("pubkey %s", id.Recipient())
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if addRecipient != "" {
|
||||||
|
return identityAddRecipient(addRecipient)
|
||||||
|
}
|
||||||
|
|
||||||
|
if removeRecipient != "" {
|
||||||
|
return identityRemoveRecipient(removeRecipient)
|
||||||
|
}
|
||||||
|
|
||||||
|
if showPath {
|
||||||
|
path, err := identityPath()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
fmt.Println(path)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default: show identity info
|
||||||
|
id, err := loadIdentity()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot load identity: %v", err)
|
||||||
|
}
|
||||||
|
if id == nil {
|
||||||
|
printHint("no identity found — use 'pda identity --new' or 'pda set --encrypt' to create one")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
path, _ := identityPath()
|
||||||
|
okf("pubkey %s", id.Recipient())
|
||||||
|
okf("identity %s", path)
|
||||||
|
|
||||||
|
extra, err := loadRecipients()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot load recipients: %v", err)
|
||||||
|
}
|
||||||
|
for _, r := range extra {
|
||||||
|
okf("recipient %s", r)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func identityAddRecipient(key string) error {
|
||||||
|
r, err := age.ParseX25519Recipient(key)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot add recipient: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
identity, err := loadIdentity()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot add recipient: %v", err)
|
||||||
|
}
|
||||||
|
if identity == nil {
|
||||||
|
return withHint(
|
||||||
|
fmt.Errorf("cannot add recipient: no identity found"),
|
||||||
|
"create one first with 'pda identity --new'",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if r.String() == identity.Recipient().String() {
|
||||||
|
return fmt.Errorf("cannot add recipient: key is your own identity")
|
||||||
|
}
|
||||||
|
|
||||||
|
existing, err := loadRecipients()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot add recipient: %v", err)
|
||||||
|
}
|
||||||
|
for _, e := range existing {
|
||||||
|
if e.String() == r.String() {
|
||||||
|
return fmt.Errorf("cannot add recipient: key already present")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
existing = append(existing, r)
|
||||||
|
if err := saveRecipients(existing); err != nil {
|
||||||
|
return fmt.Errorf("cannot add recipient: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
recipients, err := allRecipients(identity)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot add recipient: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
count, err := reencryptAllStores(identity, recipients)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot add recipient: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
okf("added recipient %s", r)
|
||||||
|
if count > 0 {
|
||||||
|
okf("re-encrypted %d secret(s)", count)
|
||||||
|
}
|
||||||
|
return autoSync("added recipient")
|
||||||
|
}
|
||||||
|
|
||||||
|
func identityRemoveRecipient(key string) error {
|
||||||
|
r, err := age.ParseX25519Recipient(key)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot remove recipient: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
identity, err := loadIdentity()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot remove recipient: %v", err)
|
||||||
|
}
|
||||||
|
if identity == nil {
|
||||||
|
return withHint(
|
||||||
|
fmt.Errorf("cannot remove recipient: no identity found"),
|
||||||
|
"create one first with 'pda identity --new'",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
existing, err := loadRecipients()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot remove recipient: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
found := false
|
||||||
|
var updated []*age.X25519Recipient
|
||||||
|
for _, e := range existing {
|
||||||
|
if e.String() == r.String() {
|
||||||
|
found = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
updated = append(updated, e)
|
||||||
|
}
|
||||||
|
if !found {
|
||||||
|
return fmt.Errorf("cannot remove recipient: key not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := saveRecipients(updated); err != nil {
|
||||||
|
return fmt.Errorf("cannot remove recipient: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
recipients, err := allRecipients(identity)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot remove recipient: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
count, err := reencryptAllStores(identity, recipients)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot remove recipient: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
okf("removed recipient %s", r)
|
||||||
|
if count > 0 {
|
||||||
|
okf("re-encrypted %d secret(s)", count)
|
||||||
|
}
|
||||||
|
return autoSync("removed recipient")
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
identityCmd.Flags().Bool("new", false, "generate a new identity (errors if one already exists)")
|
||||||
|
identityCmd.Flags().Bool("path", false, "print only the identity file path")
|
||||||
|
identityCmd.Flags().String("add-recipient", "", "add an age public key as an additional encryption recipient")
|
||||||
|
identityCmd.Flags().String("remove-recipient", "", "remove an age public key from the recipient list")
|
||||||
|
identityCmd.MarkFlagsMutuallyExclusive("new", "path", "add-recipient", "remove-recipient")
|
||||||
|
rootCmd.AddCommand(identityCmd)
|
||||||
|
}
|
||||||
129
cmd/init.go
Normal file
129
cmd/init.go
Normal file
|
|
@ -0,0 +1,129 @@
|
||||||
|
/*
|
||||||
|
Copyright © 2025 Lewis Wynne <lew@ily.rs>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
var initCmd = &cobra.Command{
|
||||||
|
Use: "init [remote-url]",
|
||||||
|
Short: "Initialise pda! version control",
|
||||||
|
SilenceUsage: true,
|
||||||
|
Args: cobra.MaximumNArgs(1),
|
||||||
|
RunE: vcsInit,
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
initCmd.Flags().Bool("clean", false, "remove .git from stores directory before initialising")
|
||||||
|
rootCmd.AddCommand(initCmd)
|
||||||
|
}
|
||||||
|
|
||||||
|
func vcsInit(cmd *cobra.Command, args []string) error {
|
||||||
|
store := &Store{}
|
||||||
|
repoDir, err := store.path()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
clean, err := cmd.Flags().GetBool("clean")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
hasRemote := len(args) == 1
|
||||||
|
|
||||||
|
if clean {
|
||||||
|
gitDir := filepath.Join(repoDir, ".git")
|
||||||
|
if _, err := os.Stat(gitDir); err == nil {
|
||||||
|
promptf("remove .git from '%s'? (y/n)", repoDir)
|
||||||
|
var confirm string
|
||||||
|
if err := scanln(&confirm); err != nil {
|
||||||
|
return fmt.Errorf("cannot init: %w", err)
|
||||||
|
}
|
||||||
|
if strings.ToLower(confirm) != "y" {
|
||||||
|
return fmt.Errorf("cannot init: aborted")
|
||||||
|
}
|
||||||
|
if err := os.RemoveAll(gitDir); err != nil {
|
||||||
|
return fmt.Errorf("cannot init: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if hasRemote {
|
||||||
|
dbs, err := store.AllStores()
|
||||||
|
if err == nil && len(dbs) > 0 {
|
||||||
|
promptf("remove all existing stores and .gitignore, required for clone? (y/n)")
|
||||||
|
var confirm string
|
||||||
|
if err := scanln(&confirm); err != nil {
|
||||||
|
return fmt.Errorf("cannot init: %w", err)
|
||||||
|
}
|
||||||
|
if strings.ToLower(confirm) != "y" {
|
||||||
|
return fmt.Errorf("cannot init: aborted")
|
||||||
|
}
|
||||||
|
if err := wipeAllStores(store); err != nil {
|
||||||
|
return fmt.Errorf("cannot init: %w", err)
|
||||||
|
}
|
||||||
|
gi := filepath.Join(repoDir, ".gitignore")
|
||||||
|
if err := os.Remove(gi); err != nil && !os.IsNotExist(err) {
|
||||||
|
return fmt.Errorf("cannot init: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
gitDir := filepath.Join(repoDir, ".git")
|
||||||
|
if _, err := os.Stat(gitDir); err == nil {
|
||||||
|
warnf("vcs already initialised")
|
||||||
|
printHint("use --clean to reinitialise")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if hasRemote {
|
||||||
|
// git clone requires the target directory to be empty
|
||||||
|
entries, err := os.ReadDir(repoDir)
|
||||||
|
if err == nil && len(entries) > 0 {
|
||||||
|
return withHint(fmt.Errorf("cannot init: stores directory not empty"), "use --clean with a remote to wipe and clone")
|
||||||
|
}
|
||||||
|
|
||||||
|
remote := args[0]
|
||||||
|
progressf("git clone %s %s", remote, repoDir)
|
||||||
|
if err := runGit("", "clone", remote, repoDir); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if err := os.MkdirAll(repoDir, 0o750); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
progressf("git init")
|
||||||
|
if err := runGit(repoDir, "init"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return writeGitignore(repoDir)
|
||||||
|
}
|
||||||
|
|
@ -31,17 +31,17 @@ import (
|
||||||
type KeySpec struct {
|
type KeySpec struct {
|
||||||
Raw string // Whole, unmodified user input
|
Raw string // Whole, unmodified user input
|
||||||
RawKey string // Key segment
|
RawKey string // Key segment
|
||||||
RawDB string // DB segment
|
RawDB string // Store segment
|
||||||
Key string // Normalised Key
|
Key string // Normalised Key
|
||||||
DB string // Normalised DB
|
DB string // Normalised store
|
||||||
}
|
}
|
||||||
|
|
||||||
// ParseKey parses "KEY[@DB]" into a normalized KeySpec.
|
// ParseKey parses "KEY[@STORE]" into a normalized KeySpec.
|
||||||
// When defaults is true, a missing DB defaults to the configured default.
|
// When defaults is true, a missing store defaults to the configured default.
|
||||||
func ParseKey(raw string, defaults bool) (KeySpec, error) {
|
func ParseKey(raw string, defaults bool) (KeySpec, error) {
|
||||||
parts := strings.Split(raw, "@")
|
parts := strings.Split(raw, "@")
|
||||||
if len(parts) > 2 {
|
if len(parts) > 2 {
|
||||||
return KeySpec{}, fmt.Errorf("bad key format, use KEY@DB")
|
return KeySpec{}, fmt.Errorf("bad key format, use KEY@STORE")
|
||||||
}
|
}
|
||||||
|
|
||||||
rawKey := parts[0]
|
rawKey := parts[0]
|
||||||
|
|
@ -49,7 +49,7 @@ func ParseKey(raw string, defaults bool) (KeySpec, error) {
|
||||||
if len(parts) == 2 {
|
if len(parts) == 2 {
|
||||||
rawDB = parts[1]
|
rawDB = parts[1]
|
||||||
if strings.TrimSpace(rawDB) == "" {
|
if strings.TrimSpace(rawDB) == "" {
|
||||||
return KeySpec{}, fmt.Errorf("bad key format, use KEY@DB")
|
return KeySpec{}, fmt.Errorf("bad key format, use KEY@STORE")
|
||||||
}
|
}
|
||||||
if err := validateDBName(rawDB); err != nil {
|
if err := validateDBName(rawDB); err != nil {
|
||||||
return KeySpec{}, err
|
return KeySpec{}, err
|
||||||
|
|
@ -80,7 +80,7 @@ func (k KeySpec) Full() string {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Display returns the normalized key reference
|
// Display returns the normalized key reference
|
||||||
// but omits the default database if none was set manually
|
// but omits the default store if none was set manually
|
||||||
func (k KeySpec) Display() string {
|
func (k KeySpec) Display() string {
|
||||||
if k.DB == "" || k.DB == config.Store.DefaultStoreName {
|
if k.DB == "" || k.DB == config.Store.DefaultStoreName {
|
||||||
return k.Key
|
return k.Key
|
||||||
|
|
|
||||||
|
|
@ -24,31 +24,98 @@ package cmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"os"
|
||||||
|
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
// delCmd represents the set command
|
// listStoresCmd represents the list-stores command
|
||||||
var listDbsCmd = &cobra.Command{
|
var listStoresCmd = &cobra.Command{
|
||||||
Use: "list-dbs",
|
Use: "list-stores",
|
||||||
Short: "List all dbs.",
|
Short: "List all stores",
|
||||||
Aliases: []string{"ls-dbs", "lsd"},
|
Aliases: []string{"lss"},
|
||||||
Args: cobra.NoArgs,
|
Args: cobra.NoArgs,
|
||||||
RunE: listDbs,
|
RunE: listStores,
|
||||||
SilenceUsage: true,
|
SilenceUsage: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
func listDbs(cmd *cobra.Command, args []string) error {
|
func listStores(cmd *cobra.Command, args []string) error {
|
||||||
store := &Store{}
|
store := &Store{}
|
||||||
dbs, err := store.AllStores()
|
dbs, err := store.AllStores()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot list-dbs: %v", err)
|
return fmt.Errorf("cannot list stores: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
short, err := cmd.Flags().GetBool("short")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot list stores: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if short {
|
||||||
|
for _, db := range dbs {
|
||||||
|
fmt.Println("@" + db)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type storeInfo struct {
|
||||||
|
name string
|
||||||
|
keys int
|
||||||
|
size string
|
||||||
|
}
|
||||||
|
|
||||||
|
rows := make([]storeInfo, 0, len(dbs))
|
||||||
|
nameW, keysW, sizeW := len("Store"), len("Keys"), len("Size")
|
||||||
|
|
||||||
for _, db := range dbs {
|
for _, db := range dbs {
|
||||||
fmt.Println("@" + db)
|
p, err := store.storePath(db)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot list stores: %v", err)
|
||||||
|
}
|
||||||
|
fi, err := os.Stat(p)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot list stores: %v", err)
|
||||||
|
}
|
||||||
|
entries, err := readStoreFile(p, nil)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot list stores: %v", err)
|
||||||
|
}
|
||||||
|
name := "@" + db
|
||||||
|
keysStr := fmt.Sprintf("%d", len(entries))
|
||||||
|
sizeStr := formatSize(int(fi.Size()))
|
||||||
|
if len(name) > nameW {
|
||||||
|
nameW = len(name)
|
||||||
|
}
|
||||||
|
if len(keysStr) > keysW {
|
||||||
|
keysW = len(keysStr)
|
||||||
|
}
|
||||||
|
if len(sizeStr) > sizeW {
|
||||||
|
sizeW = len(sizeStr)
|
||||||
|
}
|
||||||
|
rows = append(rows, storeInfo{name: name, keys: len(entries), size: sizeStr})
|
||||||
|
}
|
||||||
|
|
||||||
|
underline := func(s string) string {
|
||||||
|
if stdoutIsTerminal() {
|
||||||
|
return "\033[4m" + s + "\033[0m"
|
||||||
|
}
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
noHeader, _ := cmd.Flags().GetBool("no-header")
|
||||||
|
if !noHeader {
|
||||||
|
fmt.Printf("%*s%s %*s%s %s\n",
|
||||||
|
keysW-len("Keys"), "", underline("Keys"),
|
||||||
|
sizeW-len("Size"), "", underline("Size"),
|
||||||
|
underline("Store"))
|
||||||
|
}
|
||||||
|
for _, r := range rows {
|
||||||
|
fmt.Printf("%*d %*s %s\n", keysW, r.keys, sizeW, r.size, r.name)
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
rootCmd.AddCommand(listDbsCmd)
|
listStoresCmd.Flags().Bool("short", false, "only print store names")
|
||||||
|
listStoresCmd.Flags().Bool("no-header", false, "suppress the header row")
|
||||||
|
rootCmd.AddCommand(listStoresCmd)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
800
cmd/list.go
800
cmd/list.go
|
|
@ -23,27 +23,153 @@ THE SOFTWARE.
|
||||||
package cmd
|
package cmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"encoding/json"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
"slices"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"unicode/utf8"
|
||||||
|
|
||||||
"github.com/dgraph-io/badger/v4"
|
|
||||||
"github.com/jedib0t/go-pretty/v6/table"
|
"github.com/jedib0t/go-pretty/v6/table"
|
||||||
|
"github.com/jedib0t/go-pretty/v6/text"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
|
"golang.org/x/term"
|
||||||
|
)
|
||||||
|
|
||||||
|
// formatEnum implements pflag.Value for format selection.
|
||||||
|
type formatEnum string
|
||||||
|
|
||||||
|
func (e *formatEnum) String() string { return string(*e) }
|
||||||
|
|
||||||
|
func (e *formatEnum) Set(v string) error {
|
||||||
|
if err := validListFormat(v); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
*e = formatEnum(v)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func validListFormat(v string) error {
|
||||||
|
switch v {
|
||||||
|
case "table", "tsv", "csv", "html", "markdown", "ndjson", "json":
|
||||||
|
return nil
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("must be one of 'table', 'tsv', 'csv', 'html', 'markdown', 'ndjson', or 'json'")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *formatEnum) Type() string { return "format" }
|
||||||
|
|
||||||
|
var columnNames = map[string]columnKind{
|
||||||
|
"key": columnKey,
|
||||||
|
"store": columnStore,
|
||||||
|
"value": columnValue,
|
||||||
|
"meta": columnMeta,
|
||||||
|
"size": columnSize,
|
||||||
|
"ttl": columnTTL,
|
||||||
|
}
|
||||||
|
|
||||||
|
func validListColumns(v string) error {
|
||||||
|
seen := make(map[string]bool)
|
||||||
|
for _, raw := range strings.Split(v, ",") {
|
||||||
|
tok := strings.TrimSpace(raw)
|
||||||
|
if _, ok := columnNames[tok]; !ok {
|
||||||
|
return fmt.Errorf("must be a comma-separated list of 'key', 'store', 'value', 'meta', 'size', 'ttl' (got '%s')", tok)
|
||||||
|
}
|
||||||
|
if seen[tok] {
|
||||||
|
return fmt.Errorf("duplicate column '%s'", tok)
|
||||||
|
}
|
||||||
|
seen[tok] = true
|
||||||
|
}
|
||||||
|
if len(seen) == 0 {
|
||||||
|
return fmt.Errorf("at least one column is required")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseColumns(v string) []columnKind {
|
||||||
|
var cols []columnKind
|
||||||
|
for _, raw := range strings.Split(v, ",") {
|
||||||
|
tok := strings.TrimSpace(raw)
|
||||||
|
if kind, ok := columnNames[tok]; ok {
|
||||||
|
cols = append(cols, kind)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return cols
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
listBase64 bool
|
||||||
|
listCount bool
|
||||||
|
listNoKeys bool
|
||||||
|
listNoStore bool
|
||||||
|
listNoValues bool
|
||||||
|
listNoMeta bool
|
||||||
|
listNoSize bool
|
||||||
|
listNoTTL bool
|
||||||
|
listFull bool
|
||||||
|
listAll bool
|
||||||
|
listNoHeader bool
|
||||||
|
listFormat formatEnum
|
||||||
|
|
||||||
|
dimStyle = text.Colors{text.Faint, text.Italic}
|
||||||
|
)
|
||||||
|
|
||||||
|
type columnKind int
|
||||||
|
|
||||||
|
const (
|
||||||
|
columnKey columnKind = iota
|
||||||
|
columnValue
|
||||||
|
columnTTL
|
||||||
|
columnStore
|
||||||
|
columnMeta
|
||||||
|
columnSize
|
||||||
)
|
)
|
||||||
|
|
||||||
var listCmd = &cobra.Command{
|
var listCmd = &cobra.Command{
|
||||||
Use: "list [DB]",
|
Use: "list [STORE]",
|
||||||
Short: "List the contents of a db.",
|
Short: "List the contents of all stores",
|
||||||
Aliases: []string{"ls"},
|
Long: `List the contents of all stores.
|
||||||
Args: cobra.MaximumNArgs(1),
|
|
||||||
RunE: list,
|
By default, list shows entries from every store. Pass a store name as a
|
||||||
SilenceUsage: true,
|
positional argument to narrow to a single store, or use --store/-s with a
|
||||||
|
glob pattern to filter by store name.
|
||||||
|
|
||||||
|
Use --key/-k and --value/-v to filter by key or value glob, and --store/-s
|
||||||
|
to filter by store name. All filters are repeatable and OR'd within the
|
||||||
|
same flag.`,
|
||||||
|
Aliases: []string{"ls"},
|
||||||
|
Args: cobra.MaximumNArgs(1),
|
||||||
|
ValidArgsFunction: completeStores,
|
||||||
|
RunE: list,
|
||||||
|
SilenceUsage: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
func list(cmd *cobra.Command, args []string) error {
|
func list(cmd *cobra.Command, args []string) error {
|
||||||
|
if listFormat == "" {
|
||||||
|
listFormat = formatEnum(config.List.DefaultListFormat)
|
||||||
|
}
|
||||||
|
|
||||||
store := &Store{}
|
store := &Store{}
|
||||||
targetDB := "@" + config.Store.DefaultStoreName
|
|
||||||
if len(args) == 1 {
|
storePatterns, err := cmd.Flags().GetStringSlice("store")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot ls: %v", err)
|
||||||
|
}
|
||||||
|
if len(storePatterns) > 0 && len(args) > 0 {
|
||||||
|
return fmt.Errorf("cannot use --store with a store argument")
|
||||||
|
}
|
||||||
|
|
||||||
|
allStores := len(args) == 0 && (config.List.AlwaysShowAllStores || listAll)
|
||||||
|
var targetDB string
|
||||||
|
if allStores {
|
||||||
|
targetDB = "all"
|
||||||
|
} else if len(args) == 0 {
|
||||||
|
targetDB = "@" + config.Store.DefaultStoreName
|
||||||
|
} else {
|
||||||
rawArg := args[0]
|
rawArg := args[0]
|
||||||
dbName, err := store.parseDB(rawArg, false)
|
dbName, err := store.parseDB(rawArg, false)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -52,135 +178,615 @@ func list(cmd *cobra.Command, args []string) error {
|
||||||
if _, err := store.FindStore(dbName); err != nil {
|
if _, err := store.FindStore(dbName); err != nil {
|
||||||
var notFound errNotFound
|
var notFound errNotFound
|
||||||
if errors.As(err, ¬Found) {
|
if errors.As(err, ¬Found) {
|
||||||
return fmt.Errorf("cannot ls '%s': No such DB", args[0])
|
return fmt.Errorf("cannot ls '%s': %w", args[0], err)
|
||||||
}
|
}
|
||||||
return fmt.Errorf("cannot ls '%s': %v", args[0], err)
|
return fmt.Errorf("cannot ls '%s': %v", args[0], err)
|
||||||
}
|
}
|
||||||
targetDB = "@" + dbName
|
targetDB = "@" + dbName
|
||||||
}
|
}
|
||||||
|
|
||||||
flags, err := enrichFlags()
|
columns := parseColumns(config.List.DefaultColumns)
|
||||||
|
|
||||||
|
// Each --no-X flag: if explicitly true, remove the column;
|
||||||
|
// if explicitly false (--no-X=false), add the column if missing.
|
||||||
|
type colToggle struct {
|
||||||
|
flag string
|
||||||
|
kind columnKind
|
||||||
|
}
|
||||||
|
for _, ct := range []colToggle{
|
||||||
|
{"no-keys", columnKey},
|
||||||
|
{"no-store", columnStore},
|
||||||
|
{"no-values", columnValue},
|
||||||
|
{"no-meta", columnMeta},
|
||||||
|
{"no-size", columnSize},
|
||||||
|
{"no-ttl", columnTTL},
|
||||||
|
} {
|
||||||
|
if !cmd.Flags().Changed(ct.flag) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
val, _ := cmd.Flags().GetBool(ct.flag)
|
||||||
|
if val {
|
||||||
|
columns = slices.DeleteFunc(columns, func(c columnKind) bool { return c == ct.kind })
|
||||||
|
} else if !slices.Contains(columns, ct.kind) {
|
||||||
|
columns = append(columns, ct.kind)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(columns) == 0 {
|
||||||
|
return withHint(fmt.Errorf("cannot ls '%s': no columns selected", targetDB), "disable some --no-* flags")
|
||||||
|
}
|
||||||
|
|
||||||
|
keyPatterns, err := cmd.Flags().GetStringSlice("key")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
|
}
|
||||||
|
matchers, err := compileGlobMatchers(keyPatterns)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
globPatterns, err := cmd.Flags().GetStringSlice("glob")
|
valuePatterns, err := cmd.Flags().GetStringSlice("value")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
}
|
}
|
||||||
separators, err := parseGlobSeparators(cmd)
|
valueMatchers, err := compileValueMatchers(valuePatterns)
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
|
||||||
}
|
|
||||||
matchers, err := compileGlobMatchers(globPatterns, separators)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
columnKinds, err := requireColumns(flags)
|
storeMatchers, err := compileGlobMatchers(storePatterns)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
identity, _ := loadIdentity()
|
||||||
|
recipients, err := allRecipients(identity)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var entries []Entry
|
||||||
|
if allStores {
|
||||||
|
storeNames, err := store.AllStores()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
|
}
|
||||||
|
for _, name := range storeNames {
|
||||||
|
p, err := store.storePath(name)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
|
}
|
||||||
|
storeEntries, err := readStoreFile(p, identity)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
|
}
|
||||||
|
for i := range storeEntries {
|
||||||
|
storeEntries[i].StoreName = name
|
||||||
|
}
|
||||||
|
entries = append(entries, storeEntries...)
|
||||||
|
}
|
||||||
|
slices.SortFunc(entries, func(a, b Entry) int {
|
||||||
|
if c := strings.Compare(a.Key, b.Key); c != 0 {
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
return strings.Compare(a.StoreName, b.StoreName)
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
dbName := targetDB[1:] // strip leading '@'
|
||||||
|
p, err := store.storePath(dbName)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
|
}
|
||||||
|
entries, err = readStoreFile(p, identity)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
|
}
|
||||||
|
for i := range entries {
|
||||||
|
entries[i].StoreName = dbName
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter by key glob, value regex, and store glob
|
||||||
|
var filtered []Entry
|
||||||
|
for _, e := range entries {
|
||||||
|
if globMatch(matchers, e.Key) && valueMatch(valueMatchers, e) && globMatch(storeMatchers, e.StoreName) {
|
||||||
|
filtered = append(filtered, e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stable sort: pinned entries first, preserving alphabetical order within each group
|
||||||
|
slices.SortStableFunc(filtered, func(a, b Entry) int {
|
||||||
|
if a.Pinned && !b.Pinned {
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
if !a.Pinned && b.Pinned {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
})
|
||||||
|
|
||||||
|
if listCount {
|
||||||
|
fmt.Fprintln(cmd.OutOrStdout(), len(filtered))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
hasFilters := len(matchers) > 0 || len(valueMatchers) > 0 || len(storeMatchers) > 0
|
||||||
|
if hasFilters && len(filtered) == 0 {
|
||||||
|
var parts []string
|
||||||
|
if len(matchers) > 0 {
|
||||||
|
parts = append(parts, fmt.Sprintf("key pattern %s", formatGlobPatterns(keyPatterns)))
|
||||||
|
}
|
||||||
|
if len(valueMatchers) > 0 {
|
||||||
|
parts = append(parts, fmt.Sprintf("value pattern %s", formatValuePatterns(valuePatterns)))
|
||||||
|
}
|
||||||
|
if len(storeMatchers) > 0 {
|
||||||
|
parts = append(parts, fmt.Sprintf("store pattern %s", formatGlobPatterns(storePatterns)))
|
||||||
|
}
|
||||||
|
return fmt.Errorf("cannot ls '%s': no matches for %s", targetDB, strings.Join(parts, " and "))
|
||||||
|
}
|
||||||
|
|
||||||
output := cmd.OutOrStdout()
|
output := cmd.OutOrStdout()
|
||||||
|
|
||||||
|
// NDJSON format: emit JSON lines directly (encrypted form for secrets)
|
||||||
|
if listFormat.String() == "ndjson" {
|
||||||
|
for _, e := range filtered {
|
||||||
|
je, err := encodeJsonEntry(e, recipients)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
|
}
|
||||||
|
je.Store = e.StoreName
|
||||||
|
data, err := json.Marshal(je)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
|
}
|
||||||
|
fmt.Fprintln(output, string(data))
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// JSON format: emit a single JSON array
|
||||||
|
if listFormat.String() == "json" {
|
||||||
|
var jsonEntries []jsonEntry
|
||||||
|
for _, e := range filtered {
|
||||||
|
je, err := encodeJsonEntry(e, recipients)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
|
}
|
||||||
|
je.Store = e.StoreName
|
||||||
|
jsonEntries = append(jsonEntries, je)
|
||||||
|
}
|
||||||
|
data, err := json.Marshal(jsonEntries)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
||||||
|
}
|
||||||
|
fmt.Fprintln(output, string(data))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Table-based formats
|
||||||
|
showValues := slices.Contains(columns, columnValue)
|
||||||
tw := table.NewWriter()
|
tw := table.NewWriter()
|
||||||
tw.SetOutputMirror(output)
|
tw.SetOutputMirror(output)
|
||||||
tw.SetStyle(table.StyleDefault)
|
tw.SetStyle(table.StyleDefault)
|
||||||
// Should these be settable flags?
|
|
||||||
tw.Style().Options.SeparateHeader = false
|
tw.Style().Options.SeparateHeader = false
|
||||||
tw.Style().Options.SeparateFooter = false
|
tw.Style().Options.SeparateFooter = false
|
||||||
tw.Style().Options.DrawBorder = false
|
tw.Style().Options.DrawBorder = false
|
||||||
tw.Style().Options.SeparateRows = false
|
tw.Style().Options.SeparateRows = false
|
||||||
tw.Style().Options.SeparateColumns = false
|
tw.Style().Options.SeparateColumns = false
|
||||||
|
tw.Style().Box.PaddingLeft = ""
|
||||||
|
tw.Style().Box.PaddingRight = " "
|
||||||
|
|
||||||
var maxContentWidths []int
|
tty := stdoutIsTerminal() && listFormat.String() == "table"
|
||||||
maxContentWidths = make([]int, len(columnKinds))
|
|
||||||
|
|
||||||
if flags.header {
|
if !(listNoHeader || config.List.AlwaysHideHeader) {
|
||||||
header := buildHeaderCells(columnKinds)
|
tw.AppendHeader(headerRow(columns, tty))
|
||||||
updateMaxContentWidths(maxContentWidths, header)
|
tw.Style().Format.Header = text.FormatDefault
|
||||||
tw.AppendHeader(stringSliceToRow(header))
|
|
||||||
}
|
}
|
||||||
|
lay := computeLayout(columns, output, filtered)
|
||||||
|
|
||||||
placeholder := "**********"
|
for _, e := range filtered {
|
||||||
var matchedCount int
|
var valueStr string
|
||||||
trans := TransactionArgs{
|
dimValue := false
|
||||||
key: targetDB,
|
if showValues {
|
||||||
readonly: true,
|
if e.Locked {
|
||||||
sync: true,
|
valueStr = "locked (identity file missing)"
|
||||||
transact: func(tx *badger.Txn, k []byte) error {
|
dimValue = true
|
||||||
opts := badger.DefaultIteratorOptions
|
} else {
|
||||||
opts.PrefetchSize = 10
|
valueStr = store.FormatBytes(listBase64, e.Value)
|
||||||
opts.PrefetchValues = flags.value
|
if !utf8.Valid(e.Value) && !listBase64 {
|
||||||
it := tx.NewIterator(opts)
|
dimValue = true
|
||||||
defer it.Close()
|
|
||||||
var valueBuf []byte
|
|
||||||
for it.Rewind(); it.Valid(); it.Next() {
|
|
||||||
item := it.Item()
|
|
||||||
key := string(item.KeyCopy(nil))
|
|
||||||
if !globMatch(matchers, key) {
|
|
||||||
continue
|
|
||||||
}
|
}
|
||||||
matchedCount++
|
|
||||||
meta := item.UserMeta()
|
|
||||||
isSecret := meta&metaSecret != 0
|
|
||||||
|
|
||||||
var valueStr string
|
|
||||||
if flags.value && (!isSecret || flags.secrets) {
|
|
||||||
if err := item.Value(func(v []byte) error {
|
|
||||||
valueBuf = append(valueBuf[:0], v...)
|
|
||||||
return nil
|
|
||||||
}); err != nil {
|
|
||||||
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
|
|
||||||
}
|
|
||||||
valueStr = store.FormatBytes(flags.binary, valueBuf)
|
|
||||||
}
|
|
||||||
|
|
||||||
columns := make([]string, 0, len(columnKinds))
|
|
||||||
for _, column := range columnKinds {
|
|
||||||
switch column {
|
|
||||||
case columnKey:
|
|
||||||
columns = append(columns, key)
|
|
||||||
case columnValue:
|
|
||||||
if isSecret && !flags.secrets {
|
|
||||||
columns = append(columns, placeholder)
|
|
||||||
} else {
|
|
||||||
columns = append(columns, valueStr)
|
|
||||||
}
|
|
||||||
case columnTTL:
|
|
||||||
columns = append(columns, formatExpiry(item.ExpiresAt()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
updateMaxContentWidths(maxContentWidths, columns)
|
|
||||||
tw.AppendRow(stringSliceToRow(columns))
|
|
||||||
}
|
}
|
||||||
return nil
|
if !(listFull || config.List.AlwaysShowFullValues) {
|
||||||
},
|
valueStr = summariseValue(valueStr, lay.value, tty)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
row := make(table.Row, 0, len(columns))
|
||||||
|
for _, col := range columns {
|
||||||
|
switch col {
|
||||||
|
case columnKey:
|
||||||
|
if tty {
|
||||||
|
row = append(row, text.Bold.Sprint(e.Key))
|
||||||
|
} else {
|
||||||
|
row = append(row, e.Key)
|
||||||
|
}
|
||||||
|
case columnValue:
|
||||||
|
if tty && dimValue {
|
||||||
|
row = append(row, dimStyle.Sprint(valueStr))
|
||||||
|
} else {
|
||||||
|
row = append(row, valueStr)
|
||||||
|
}
|
||||||
|
case columnStore:
|
||||||
|
if tty {
|
||||||
|
row = append(row, text.Colors{text.Bold, text.FgYellow}.Sprint(e.StoreName))
|
||||||
|
} else {
|
||||||
|
row = append(row, e.StoreName)
|
||||||
|
}
|
||||||
|
case columnMeta:
|
||||||
|
if tty {
|
||||||
|
row = append(row, colorizeMeta(e))
|
||||||
|
} else {
|
||||||
|
row = append(row, entryMetaString(e))
|
||||||
|
}
|
||||||
|
case columnSize:
|
||||||
|
sizeStr := formatSize(len(e.Value))
|
||||||
|
if tty {
|
||||||
|
if len(e.Value) >= 1000 {
|
||||||
|
sizeStr = text.Colors{text.Bold, text.FgGreen}.Sprint(sizeStr)
|
||||||
|
} else {
|
||||||
|
sizeStr = text.FgGreen.Sprint(sizeStr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
row = append(row, sizeStr)
|
||||||
|
case columnTTL:
|
||||||
|
ttlStr := formatExpiry(e.ExpiresAt)
|
||||||
|
if tty && e.ExpiresAt == 0 {
|
||||||
|
ttlStr = dimStyle.Sprint(ttlStr)
|
||||||
|
}
|
||||||
|
row = append(row, ttlStr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tw.AppendRow(row)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := store.Transaction(trans); err != nil {
|
applyColumnWidths(tw, columns, output, lay, listFull || config.List.AlwaysShowFullValues)
|
||||||
return err
|
renderTable(tw)
|
||||||
}
|
|
||||||
|
|
||||||
if len(matchers) > 0 && matchedCount == 0 {
|
|
||||||
return fmt.Errorf("cannot ls '%s': No matches for pattern %s", targetDB, formatGlobPatterns(globPatterns))
|
|
||||||
}
|
|
||||||
|
|
||||||
applyColumnConstraints(tw, columnKinds, output, maxContentWidths)
|
|
||||||
|
|
||||||
flags.render(tw)
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// summariseValue flattens a value to its first line and, when maxWidth > 0,
|
||||||
|
// truncates to fit. In both cases it appends "(..N more chars)" showing the
|
||||||
|
// total number of omitted characters.
|
||||||
|
func summariseValue(s string, maxWidth int, tty bool) string {
|
||||||
|
first := s
|
||||||
|
if i := strings.IndexByte(s, '\n'); i >= 0 {
|
||||||
|
first = s[:i]
|
||||||
|
}
|
||||||
|
|
||||||
|
totalRunes := utf8.RuneCountInString(s)
|
||||||
|
firstRunes := utf8.RuneCountInString(first)
|
||||||
|
|
||||||
|
// Nothing omitted and fits (or no width constraint).
|
||||||
|
if firstRunes == totalRunes && (maxWidth <= 0 || firstRunes <= maxWidth) {
|
||||||
|
return first
|
||||||
|
}
|
||||||
|
|
||||||
|
// How many runes of first can we show?
|
||||||
|
showRunes := firstRunes
|
||||||
|
if maxWidth > 0 && showRunes > maxWidth {
|
||||||
|
showRunes = maxWidth
|
||||||
|
}
|
||||||
|
|
||||||
|
style := func(s string) string {
|
||||||
|
if tty {
|
||||||
|
return dimStyle.Sprint(s)
|
||||||
|
}
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// Iteratively make room for the suffix (at most two passes since
|
||||||
|
// the digit count can change by one at a boundary like 9→10).
|
||||||
|
for range 2 {
|
||||||
|
omitted := totalRunes - showRunes
|
||||||
|
if omitted <= 0 {
|
||||||
|
return first
|
||||||
|
}
|
||||||
|
suffix := fmt.Sprintf(" (..%d more chars)", omitted)
|
||||||
|
suffixRunes := utf8.RuneCountInString(suffix)
|
||||||
|
if maxWidth <= 0 {
|
||||||
|
return first + style(suffix)
|
||||||
|
}
|
||||||
|
if showRunes+suffixRunes <= maxWidth {
|
||||||
|
runes := []rune(first)
|
||||||
|
if showRunes < len(runes) {
|
||||||
|
first = string(runes[:showRunes])
|
||||||
|
}
|
||||||
|
return first + style(suffix)
|
||||||
|
}
|
||||||
|
avail := maxWidth - suffixRunes
|
||||||
|
if avail <= 0 {
|
||||||
|
// Suffix alone exceeds maxWidth; fall through to hard trim.
|
||||||
|
break
|
||||||
|
}
|
||||||
|
showRunes = avail
|
||||||
|
}
|
||||||
|
|
||||||
|
// Column too narrow for the suffix — just truncate with an ellipsis.
|
||||||
|
if maxWidth >= 2 {
|
||||||
|
return text.Trim(first, maxWidth-1) + style("…")
|
||||||
|
}
|
||||||
|
return text.Trim(first, maxWidth)
|
||||||
|
}
|
||||||
|
|
||||||
|
func headerRow(columns []columnKind, tty bool) table.Row {
|
||||||
|
h := func(s string) interface{} {
|
||||||
|
if tty {
|
||||||
|
return text.Underline.Sprint(s)
|
||||||
|
}
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
row := make(table.Row, 0, len(columns))
|
||||||
|
for _, col := range columns {
|
||||||
|
switch col {
|
||||||
|
case columnKey:
|
||||||
|
row = append(row, h("Key"))
|
||||||
|
case columnStore:
|
||||||
|
row = append(row, h("Store"))
|
||||||
|
case columnValue:
|
||||||
|
row = append(row, h("Value"))
|
||||||
|
case columnMeta:
|
||||||
|
row = append(row, h("Meta"))
|
||||||
|
case columnSize:
|
||||||
|
row = append(row, h("Size"))
|
||||||
|
case columnTTL:
|
||||||
|
row = append(row, h("TTL"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return row
|
||||||
|
}
|
||||||
|
|
||||||
|
const (
|
||||||
|
keyColumnWidthCap = 30
|
||||||
|
storeColumnWidthCap = 20
|
||||||
|
sizeColumnWidthCap = 10
|
||||||
|
ttlColumnWidthCap = 20
|
||||||
|
)
|
||||||
|
|
||||||
|
// columnLayout holds the resolved max widths for each column kind.
|
||||||
|
type columnLayout struct {
|
||||||
|
key, store, value, meta, size, ttl int
|
||||||
|
}
|
||||||
|
|
||||||
|
// computeLayout derives column widths from the terminal size and actual
|
||||||
|
// content widths of the key/TTL columns (capped at fixed maximums). This
|
||||||
|
// avoids reserving 30+40 chars for key+TTL when the real content is narrower.
|
||||||
|
func computeLayout(columns []columnKind, out io.Writer, entries []Entry) columnLayout {
|
||||||
|
var lay columnLayout
|
||||||
|
termWidth := detectTerminalWidth(out)
|
||||||
|
|
||||||
|
// Meta column is always exactly 4 chars wide (ewtp).
|
||||||
|
lay.meta = 4
|
||||||
|
|
||||||
|
// Ensure columns are at least as wide as their headers.
|
||||||
|
lay.key = len("Key")
|
||||||
|
lay.store = len("Store")
|
||||||
|
lay.size = len("Size")
|
||||||
|
lay.ttl = len("TTL")
|
||||||
|
|
||||||
|
// Scan entries for actual max key/store/size/TTL content widths.
|
||||||
|
for _, e := range entries {
|
||||||
|
if w := utf8.RuneCountInString(e.Key); w > lay.key {
|
||||||
|
lay.key = w
|
||||||
|
}
|
||||||
|
if w := utf8.RuneCountInString(e.StoreName); w > lay.store {
|
||||||
|
lay.store = w
|
||||||
|
}
|
||||||
|
if w := utf8.RuneCountInString(formatSize(len(e.Value))); w > lay.size {
|
||||||
|
lay.size = w
|
||||||
|
}
|
||||||
|
if w := utf8.RuneCountInString(formatExpiry(e.ExpiresAt)); w > lay.ttl {
|
||||||
|
lay.ttl = w
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if lay.key > keyColumnWidthCap {
|
||||||
|
lay.key = keyColumnWidthCap
|
||||||
|
}
|
||||||
|
if lay.store > storeColumnWidthCap {
|
||||||
|
lay.store = storeColumnWidthCap
|
||||||
|
}
|
||||||
|
if lay.size > sizeColumnWidthCap {
|
||||||
|
lay.size = sizeColumnWidthCap
|
||||||
|
}
|
||||||
|
if lay.ttl > ttlColumnWidthCap {
|
||||||
|
lay.ttl = ttlColumnWidthCap
|
||||||
|
}
|
||||||
|
|
||||||
|
if termWidth <= 0 {
|
||||||
|
return lay
|
||||||
|
}
|
||||||
|
|
||||||
|
padding := len(columns) * 2
|
||||||
|
available := termWidth - padding
|
||||||
|
if available < len(columns) {
|
||||||
|
return lay
|
||||||
|
}
|
||||||
|
|
||||||
|
// Give the value column whatever is left after fixed-width columns.
|
||||||
|
lay.value = available
|
||||||
|
for _, col := range columns {
|
||||||
|
switch col {
|
||||||
|
case columnKey:
|
||||||
|
lay.value -= lay.key
|
||||||
|
case columnStore:
|
||||||
|
lay.value -= lay.store
|
||||||
|
case columnMeta:
|
||||||
|
lay.value -= lay.meta
|
||||||
|
case columnSize:
|
||||||
|
lay.value -= lay.size
|
||||||
|
case columnTTL:
|
||||||
|
lay.value -= lay.ttl
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if lay.value < 10 {
|
||||||
|
lay.value = 10
|
||||||
|
}
|
||||||
|
return lay
|
||||||
|
}
|
||||||
|
|
||||||
|
func applyColumnWidths(tw table.Writer, columns []columnKind, out io.Writer, lay columnLayout, full bool) {
|
||||||
|
termWidth := detectTerminalWidth(out)
|
||||||
|
if termWidth <= 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
tw.SetAllowedRowLength(termWidth)
|
||||||
|
|
||||||
|
var configs []table.ColumnConfig
|
||||||
|
for i, col := range columns {
|
||||||
|
cc := table.ColumnConfig{Number: i + 1}
|
||||||
|
switch col {
|
||||||
|
case columnKey:
|
||||||
|
cc.WidthMax = lay.key
|
||||||
|
cc.WidthMaxEnforcer = text.Trim
|
||||||
|
case columnStore:
|
||||||
|
cc.WidthMax = lay.store
|
||||||
|
cc.WidthMaxEnforcer = text.Trim
|
||||||
|
cc.Align = text.AlignRight
|
||||||
|
cc.AlignHeader = text.AlignRight
|
||||||
|
case columnValue:
|
||||||
|
cc.WidthMax = lay.value
|
||||||
|
if full {
|
||||||
|
cc.WidthMaxEnforcer = text.WrapText
|
||||||
|
}
|
||||||
|
// When !full, values are already pre-truncated by
|
||||||
|
// summariseValue — no enforcer needed.
|
||||||
|
case columnMeta:
|
||||||
|
cc.WidthMax = lay.meta
|
||||||
|
cc.WidthMaxEnforcer = text.Trim
|
||||||
|
cc.Align = text.AlignRight
|
||||||
|
cc.AlignHeader = text.AlignRight
|
||||||
|
case columnSize:
|
||||||
|
cc.WidthMax = lay.size
|
||||||
|
cc.WidthMaxEnforcer = text.Trim
|
||||||
|
cc.Align = text.AlignRight
|
||||||
|
cc.AlignHeader = text.AlignRight
|
||||||
|
case columnTTL:
|
||||||
|
cc.WidthMax = lay.ttl
|
||||||
|
cc.WidthMaxEnforcer = text.Trim
|
||||||
|
cc.Align = text.AlignRight
|
||||||
|
cc.AlignHeader = text.AlignRight
|
||||||
|
}
|
||||||
|
configs = append(configs, cc)
|
||||||
|
}
|
||||||
|
tw.SetColumnConfigs(configs)
|
||||||
|
}
|
||||||
|
|
||||||
|
func detectTerminalWidth(out io.Writer) int {
|
||||||
|
type fd interface{ Fd() uintptr }
|
||||||
|
if f, ok := out.(fd); ok {
|
||||||
|
if w, _, err := term.GetSize(int(f.Fd())); err == nil && w > 0 {
|
||||||
|
return w
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if w, _, err := term.GetSize(int(os.Stdout.Fd())); err == nil && w > 0 {
|
||||||
|
return w
|
||||||
|
}
|
||||||
|
if cols := os.Getenv("COLUMNS"); cols != "" {
|
||||||
|
if parsed, err := strconv.Atoi(cols); err == nil && parsed > 0 {
|
||||||
|
return parsed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// entryMetaString returns a 4-char flag string: (e)ncrypted (w)ritable (t)tl (p)inned.
|
||||||
|
func entryMetaString(e Entry) string {
|
||||||
|
var b [4]byte
|
||||||
|
if e.Secret {
|
||||||
|
b[0] = 'e'
|
||||||
|
} else {
|
||||||
|
b[0] = '-'
|
||||||
|
}
|
||||||
|
if !e.ReadOnly {
|
||||||
|
b[1] = 'w'
|
||||||
|
} else {
|
||||||
|
b[1] = '-'
|
||||||
|
}
|
||||||
|
if e.ExpiresAt > 0 {
|
||||||
|
b[2] = 't'
|
||||||
|
} else {
|
||||||
|
b[2] = '-'
|
||||||
|
}
|
||||||
|
if e.Pinned {
|
||||||
|
b[3] = 'p'
|
||||||
|
} else {
|
||||||
|
b[3] = '-'
|
||||||
|
}
|
||||||
|
return string(b[:])
|
||||||
|
}
|
||||||
|
|
||||||
|
// colorizeMeta returns a colorized meta string for TTY display.
|
||||||
|
// e=bold+yellow, w=bold+red, t=bold+green, p=bold+yellow, unset=dim.
|
||||||
|
func colorizeMeta(e Entry) string {
|
||||||
|
dim := text.Colors{text.Faint}
|
||||||
|
yellow := text.Colors{text.Bold, text.FgYellow}
|
||||||
|
red := text.Colors{text.Bold, text.FgRed}
|
||||||
|
green := text.Colors{text.Bold, text.FgGreen}
|
||||||
|
|
||||||
|
var b strings.Builder
|
||||||
|
if e.Secret {
|
||||||
|
b.WriteString(yellow.Sprint("e"))
|
||||||
|
} else {
|
||||||
|
b.WriteString(dim.Sprint("-"))
|
||||||
|
}
|
||||||
|
if !e.ReadOnly {
|
||||||
|
b.WriteString(red.Sprint("w"))
|
||||||
|
} else {
|
||||||
|
b.WriteString(dim.Sprint("-"))
|
||||||
|
}
|
||||||
|
if e.ExpiresAt > 0 {
|
||||||
|
b.WriteString(green.Sprint("t"))
|
||||||
|
} else {
|
||||||
|
b.WriteString(dim.Sprint("-"))
|
||||||
|
}
|
||||||
|
if e.Pinned {
|
||||||
|
b.WriteString(yellow.Sprint("p"))
|
||||||
|
} else {
|
||||||
|
b.WriteString(dim.Sprint("-"))
|
||||||
|
}
|
||||||
|
return b.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func renderTable(tw table.Writer) {
|
||||||
|
switch listFormat.String() {
|
||||||
|
case "tsv":
|
||||||
|
tw.RenderTSV()
|
||||||
|
case "csv":
|
||||||
|
tw.RenderCSV()
|
||||||
|
case "html":
|
||||||
|
tw.RenderHTML()
|
||||||
|
case "markdown":
|
||||||
|
tw.RenderMarkdown()
|
||||||
|
default:
|
||||||
|
tw.Render()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
listCmd.Flags().BoolVarP(&binary, "binary", "b", false, "include binary data in text output")
|
listCmd.Flags().BoolVarP(&listAll, "all", "a", false, "list across all stores")
|
||||||
listCmd.Flags().BoolVarP(&secret, "secret", "S", false, "display values marked as secret")
|
listCmd.Flags().BoolVarP(&listBase64, "base64", "b", false, "view binary data as base64")
|
||||||
listCmd.Flags().BoolVar(&noKeys, "no-keys", false, "suppress the key column")
|
listCmd.Flags().BoolVarP(&listCount, "count", "c", false, "print only the count of matching entries")
|
||||||
listCmd.Flags().BoolVar(&noValues, "no-values", false, "suppress the value column")
|
listCmd.Flags().BoolVar(&listNoKeys, "no-keys", false, "suppress the key column")
|
||||||
listCmd.Flags().BoolVarP(&ttl, "ttl", "t", false, "append a TTL column when entries expire")
|
listCmd.Flags().BoolVar(&listNoStore, "no-store", false, "suppress the store column")
|
||||||
listCmd.Flags().BoolVar(&header, "header", false, "include header row")
|
listCmd.Flags().BoolVar(&listNoValues, "no-values", false, "suppress the value column")
|
||||||
listCmd.Flags().VarP(&format, "format", "o", "output format (table|tsv|csv|markdown|html)")
|
listCmd.Flags().BoolVar(&listNoMeta, "no-meta", false, "suppress the meta column")
|
||||||
listCmd.Flags().StringSliceP("glob", "g", nil, "Filter keys with glob pattern (repeatable)")
|
listCmd.Flags().BoolVar(&listNoSize, "no-size", false, "suppress the size column")
|
||||||
listCmd.Flags().String("glob-sep", "", fmt.Sprintf("Characters treated as separators for globbing (default %q)", defaultGlobSeparatorsDisplay()))
|
listCmd.Flags().BoolVar(&listNoTTL, "no-ttl", false, "suppress the TTL column")
|
||||||
|
listCmd.Flags().BoolVarP(&listFull, "full", "f", false, "show full values without truncation")
|
||||||
|
listCmd.Flags().BoolVar(&listNoHeader, "no-header", false, "suppress the header row")
|
||||||
|
listCmd.Flags().VarP(&listFormat, "format", "o", "output format (table|tsv|csv|markdown|html|ndjson|json)")
|
||||||
|
listCmd.Flags().StringSliceP("key", "k", nil, "filter keys with glob pattern (repeatable)")
|
||||||
|
listCmd.Flags().StringSliceP("store", "s", nil, "filter stores with glob pattern (repeatable)")
|
||||||
|
listCmd.RegisterFlagCompletionFunc("store", completeStoreFlag)
|
||||||
|
listCmd.Flags().StringSliceP("value", "v", nil, "filter values with glob pattern (repeatable)")
|
||||||
rootCmd.AddCommand(listCmd)
|
rootCmd.AddCommand(listCmd)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,101 +0,0 @@
|
||||||
/*
|
|
||||||
Copyright © 2025 Lewis Wynne <lew@ily.rs>
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
|
||||||
all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|
||||||
THE SOFTWARE.
|
|
||||||
*/
|
|
||||||
|
|
||||||
package cmd
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/jedib0t/go-pretty/v6/table"
|
|
||||||
)
|
|
||||||
|
|
||||||
// ListArgs tracks the resolved flag configuration for the list command.
type ListArgs struct {
	header  bool               // include a header row in the output
	key     bool               // emit the key column
	value   bool               // emit the value column
	ttl     bool               // emit the TTL column
	binary  bool               // include binary data in text output
	secrets bool               // display values marked as secret
	render  func(table.Writer) // renderer selected from the --format flag
}
|
|
||||||
|
|
||||||
// formatEnum implements pflag.Value for format selection.
type formatEnum string

// String returns the currently selected format name.
func (e *formatEnum) String() string {
	return string(*e)
}
|
|
||||||
|
|
||||||
func (e *formatEnum) Set(v string) error {
|
|
||||||
switch v {
|
|
||||||
case "table", "tsv", "csv", "html", "markdown":
|
|
||||||
*e = formatEnum(v)
|
|
||||||
return nil
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("must be one of \"table\", \"tsv\", \"csv\", \"html\", or \"markdown\"")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Type names the value type for pflag's help output.
func (e *formatEnum) Type() string {
	return "format"
}
|
|
||||||
|
|
||||||
// Package-level storage for the list command's flag values.
var (
	binary   bool       = false   // include binary data in text output
	secret   bool       = false   // display values marked as secret
	noKeys   bool       = false   // suppress the key column
	noValues bool       = false   // suppress the value column
	ttl      bool       = false   // append a TTL column
	header   bool       = false   // include a header row
	format   formatEnum = "table" // selected output format
)
|
|
||||||
|
|
||||||
func enrichFlags() (ListArgs, error) {
|
|
||||||
var renderFunc func(tw table.Writer)
|
|
||||||
switch format.String() {
|
|
||||||
case "tsv":
|
|
||||||
renderFunc = func(tw table.Writer) { tw.RenderTSV() }
|
|
||||||
case "csv":
|
|
||||||
renderFunc = func(tw table.Writer) { tw.RenderCSV() }
|
|
||||||
case "html":
|
|
||||||
renderFunc = func(tw table.Writer) { tw.RenderHTML() }
|
|
||||||
case "markdown":
|
|
||||||
renderFunc = func(tw table.Writer) { tw.RenderMarkdown() }
|
|
||||||
case "table":
|
|
||||||
renderFunc = func(tw table.Writer) { tw.Render() }
|
|
||||||
}
|
|
||||||
|
|
||||||
if noKeys && noValues && !ttl {
|
|
||||||
return ListArgs{}, fmt.Errorf("no columns selected; disable --no-keys/--no-values or pass --ttl")
|
|
||||||
}
|
|
||||||
|
|
||||||
return ListArgs{
|
|
||||||
header: header,
|
|
||||||
key: !noKeys,
|
|
||||||
value: !noValues,
|
|
||||||
ttl: ttl,
|
|
||||||
binary: binary,
|
|
||||||
render: renderFunc,
|
|
||||||
secrets: secret,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
@ -1,270 +0,0 @@
|
||||||
/*
|
|
||||||
Copyright © 2025 Lewis Wynne <lew@ily.rs>
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
|
||||||
all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|
||||||
THE SOFTWARE.
|
|
||||||
*/
|
|
||||||
|
|
||||||
package cmd
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"os"
|
|
||||||
"slices"
|
|
||||||
"strconv"
|
|
||||||
|
|
||||||
"github.com/jedib0t/go-pretty/v6/table"
|
|
||||||
"github.com/jedib0t/go-pretty/v6/text"
|
|
||||||
"golang.org/x/term"
|
|
||||||
)
|
|
||||||
|
|
||||||
// columnKind identifies one logical output column of the list command.
type columnKind int

const (
	columnKey   columnKind = iota // the entry's key
	columnValue                   // the entry's value
	columnTTL                     // the entry's remaining TTL
)
|
|
||||||
|
|
||||||
func requireColumns(args ListArgs) ([]columnKind, error) {
|
|
||||||
var columns []columnKind
|
|
||||||
if args.key {
|
|
||||||
columns = append(columns, columnKey)
|
|
||||||
}
|
|
||||||
if args.value {
|
|
||||||
columns = append(columns, columnValue)
|
|
||||||
}
|
|
||||||
if args.ttl {
|
|
||||||
columns = append(columns, columnTTL)
|
|
||||||
}
|
|
||||||
if len(columns) == 0 {
|
|
||||||
return nil, fmt.Errorf("no columns selected; enable key, value, or ttl output")
|
|
||||||
}
|
|
||||||
return columns, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func buildHeaderCells(columnKinds []columnKind) []string {
|
|
||||||
labels := make([]string, 0, len(columnKinds))
|
|
||||||
for _, column := range columnKinds {
|
|
||||||
switch column {
|
|
||||||
case columnKey:
|
|
||||||
labels = append(labels, "Key")
|
|
||||||
case columnValue:
|
|
||||||
labels = append(labels, "Value")
|
|
||||||
case columnTTL:
|
|
||||||
labels = append(labels, "TTL")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return labels
|
|
||||||
}
|
|
||||||
|
|
||||||
func stringSliceToRow(values []string) table.Row {
|
|
||||||
row := make(table.Row, len(values))
|
|
||||||
for i, val := range values {
|
|
||||||
row[i] = val
|
|
||||||
}
|
|
||||||
return row
|
|
||||||
}
|
|
||||||
|
|
||||||
func updateMaxContentWidths(maxWidths []int, values []string) {
|
|
||||||
if len(maxWidths) == 0 {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
limit := min(len(values), len(maxWidths))
|
|
||||||
for i := range limit {
|
|
||||||
width := text.LongestLineLen(values[i])
|
|
||||||
if width > maxWidths[i] {
|
|
||||||
maxWidths[i] = width
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// applyColumnConstraints sizes the table's columns to fit the terminal:
// it distributes the available content width by base percentages, shrinks
// columns whose actual content is narrower, then hands the reclaimed space
// back to columns that can still grow. Wrapping enforces the final caps.
func applyColumnConstraints(tw table.Writer, columns []columnKind, out io.Writer, maxContentWidths []int) {
	totalWidth := detectTerminalWidth(out)
	if totalWidth <= 0 {
		// No detectable terminal (e.g. piped output): assume 100 cells.
		totalWidth = 100
	}
	// Subtract style decoration (padding, separators, border) to get the
	// width available for cell content, then split it across columns.
	contentWidth := contentWidthForStyle(totalWidth, tw, len(columns))
	widths := distributeWidths(contentWidth, columns)

	// First pass: clamp each column down to its actual content width
	// (never below 1), tracking how much of the budget is used.
	used := 0
	for idx, width := range widths {
		if width <= 0 {
			width = 1
		}
		if idx < len(maxContentWidths) {
			if actual := maxContentWidths[idx]; actual > 0 && width > actual {
				width = actual
			}
		}
		widths[idx] = width
		used += width
	}

	// Second pass: redistribute leftover budget one cell at a time,
	// round-robin, to columns that have not yet reached their content
	// width. Stop when the budget is spent or no column can grow
	// (progressed guards against an infinite loop).
	remaining := contentWidth - used
	for remaining > 0 {
		progressed := false
		for idx := range widths {
			actual := 0
			if idx < len(maxContentWidths) {
				actual = maxContentWidths[idx]
			}
			if actual > 0 && widths[idx] >= actual {
				continue
			}
			widths[idx]++
			remaining--
			progressed = true
			if remaining == 0 {
				break
			}
		}
		if !progressed {
			break
		}
	}

	// Apply the final widths; WrapText folds over-long cells rather than
	// truncating them.
	configs := make([]table.ColumnConfig, 0, len(columns))
	for idx, width := range widths {
		configs = append(configs, table.ColumnConfig{
			Number:           idx + 1,
			WidthMax:         width,
			WidthMaxEnforcer: text.WrapText,
		})
	}
	tw.SetColumnConfigs(configs)
	tw.SetAllowedRowLength(totalWidth)
}
|
|
||||||
|
|
||||||
func contentWidthForStyle(totalWidth int, tw table.Writer, columnCount int) int {
|
|
||||||
if columnCount == 0 {
|
|
||||||
return totalWidth
|
|
||||||
}
|
|
||||||
style := tw.Style()
|
|
||||||
if style != nil {
|
|
||||||
totalWidth -= tableRowOverhead(style, columnCount)
|
|
||||||
}
|
|
||||||
if totalWidth < columnCount {
|
|
||||||
totalWidth = columnCount
|
|
||||||
}
|
|
||||||
return totalWidth
|
|
||||||
}
|
|
||||||
|
|
||||||
func tableRowOverhead(style *table.Style, columnCount int) int {
|
|
||||||
if style == nil || columnCount == 0 {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
paddingWidth := text.StringWidthWithoutEscSequences(style.Box.PaddingLeft + style.Box.PaddingRight)
|
|
||||||
overhead := paddingWidth * columnCount
|
|
||||||
if style.Options.SeparateColumns && columnCount > 1 {
|
|
||||||
overhead += (columnCount - 1) * maxSeparatorWidth(style)
|
|
||||||
}
|
|
||||||
if style.Options.DrawBorder {
|
|
||||||
overhead += text.StringWidthWithoutEscSequences(style.Box.Left + style.Box.Right)
|
|
||||||
}
|
|
||||||
return overhead
|
|
||||||
}
|
|
||||||
|
|
||||||
func maxSeparatorWidth(style *table.Style) int {
|
|
||||||
widest := 0
|
|
||||||
separators := []string{
|
|
||||||
style.Box.MiddleSeparator,
|
|
||||||
style.Box.EmptySeparator,
|
|
||||||
style.Box.MiddleHorizontal,
|
|
||||||
style.Box.TopSeparator,
|
|
||||||
style.Box.BottomSeparator,
|
|
||||||
style.Box.MiddleVertical,
|
|
||||||
style.Box.LeftSeparator,
|
|
||||||
style.Box.RightSeparator,
|
|
||||||
}
|
|
||||||
for _, sep := range separators {
|
|
||||||
if width := text.StringWidthWithoutEscSequences(sep); width > widest {
|
|
||||||
widest = width
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return widest
|
|
||||||
}
|
|
||||||
|
|
||||||
// fdWriter is satisfied by writers backed by an OS file descriptor
// (e.g. *os.File), letting terminal-size detection query the fd.
type fdWriter interface {
	Fd() uintptr
}
|
|
||||||
|
|
||||||
func detectTerminalWidth(out io.Writer) int {
|
|
||||||
if f, ok := out.(fdWriter); ok {
|
|
||||||
if w, _, err := term.GetSize(int(f.Fd())); err == nil && w > 0 {
|
|
||||||
return w
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if w, _, err := term.GetSize(int(os.Stdout.Fd())); err == nil && w > 0 {
|
|
||||||
return w
|
|
||||||
}
|
|
||||||
if cols := os.Getenv("COLUMNS"); cols != "" {
|
|
||||||
if parsed, err := strconv.Atoi(cols); err == nil && parsed > 0 {
|
|
||||||
return parsed
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func distributeWidths(total int, columns []columnKind) []int {
|
|
||||||
if total <= 0 {
|
|
||||||
total = 100
|
|
||||||
}
|
|
||||||
hasTTL := slices.Contains(columns, columnTTL)
|
|
||||||
base := make([]float64, len(columns))
|
|
||||||
sum := 0.0
|
|
||||||
for i, c := range columns {
|
|
||||||
pct := basePercentageForColumn(c, hasTTL)
|
|
||||||
base[i] = pct
|
|
||||||
sum += pct
|
|
||||||
}
|
|
||||||
if sum == 0 {
|
|
||||||
sum = 1
|
|
||||||
}
|
|
||||||
widths := make([]int, len(columns))
|
|
||||||
remaining := total
|
|
||||||
const minColWidth = 10
|
|
||||||
for i := range columns {
|
|
||||||
width := max(int((base[i]/sum)*float64(total)), minColWidth)
|
|
||||||
widths[i] = width
|
|
||||||
remaining -= width
|
|
||||||
}
|
|
||||||
for i := 0; remaining > 0 && len(columns) > 0; i++ {
|
|
||||||
idx := i % len(columns)
|
|
||||||
widths[idx]++
|
|
||||||
remaining--
|
|
||||||
}
|
|
||||||
return widths
|
|
||||||
}
|
|
||||||
|
|
||||||
func basePercentageForColumn(c columnKind, hasTTL bool) float64 {
|
|
||||||
switch c {
|
|
||||||
case columnKey:
|
|
||||||
return 0.25
|
|
||||||
case columnValue:
|
|
||||||
if hasTTL {
|
|
||||||
return 0.5
|
|
||||||
}
|
|
||||||
return 0.75
|
|
||||||
case columnTTL:
|
|
||||||
return 0.25
|
|
||||||
default:
|
|
||||||
return 0.25
|
|
||||||
}
|
|
||||||
}
|
|
||||||
70
cmd/match.go
Normal file
70
cmd/match.go
Normal file
|
|
@ -0,0 +1,70 @@
|
||||||
|
/*
|
||||||
|
Copyright © 2025 Lewis Wynne <lew@ily.rs>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
"unicode/utf8"
|
||||||
|
|
||||||
|
"github.com/gobwas/glob"
|
||||||
|
)
|
||||||
|
|
||||||
|
func compileValueMatchers(patterns []string) ([]glob.Glob, error) {
|
||||||
|
var matchers []glob.Glob
|
||||||
|
for _, pattern := range patterns {
|
||||||
|
m, err := glob.Compile(strings.ToLower(pattern), defaultGlobSeparators...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
matchers = append(matchers, m)
|
||||||
|
}
|
||||||
|
return matchers, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func valueMatch(matchers []glob.Glob, e Entry) bool {
|
||||||
|
if len(matchers) == 0 {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if e.Locked {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if !utf8.Valid(e.Value) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
s := strings.ToLower(string(e.Value))
|
||||||
|
for _, m := range matchers {
|
||||||
|
if m.Match(s) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// formatValuePatterns renders patterns as a comma-separated list of
// single-quoted strings for use in error/diagnostic messages.
func formatValuePatterns(patterns []string) string {
	quoted := make([]string, len(patterns))
	for i, pattern := range patterns {
		quoted[i] = "'" + pattern + "'"
	}
	return strings.Join(quoted, ", ")
}
|
||||||
179
cmd/meta.go
Normal file
179
cmd/meta.go
Normal file
|
|
@ -0,0 +1,179 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
// metaCmd is the cobra command for viewing or modifying a key's metadata.
var metaCmd = &cobra.Command{
	Use:   "meta KEY[@STORE]",
	Short: "View or modify metadata for a key",
	Long: `View or modify metadata (TTL, encryption, read-only, pinned) for a key
without changing its value.

With no flags, displays the key's current metadata. Pass flags to modify.`,
	Args:              cobra.ExactArgs(1), // exactly one KEY[@STORE] argument
	ValidArgsFunction: completeKeys,       // shell completion for key names
	RunE:              meta,
	SilenceUsage:      true, // runtime errors should not dump usage text
}
|
||||||
|
|
||||||
|
// meta implements the "meta" command. With no modification flags it
// prints the key's current metadata; otherwise it validates the flag
// combination, applies the requested changes, writes the store back,
// and triggers auto-sync.
func meta(cmd *cobra.Command, args []string) error {
	store := &Store{}

	// Resolve KEY[@STORE] into a concrete spec.
	spec, err := store.parseKey(args[0], true)
	if err != nil {
		return fmt.Errorf("cannot meta '%s': %v", args[0], err)
	}

	// Identity may legitimately be absent (error deliberately ignored);
	// it is only required later if --encrypt is requested.
	identity, _ := loadIdentity()

	p, err := store.storePath(spec.DB)
	if err != nil {
		return fmt.Errorf("cannot meta '%s': %v", args[0], err)
	}
	entries, err := readStoreFile(p, identity)
	if err != nil {
		return fmt.Errorf("cannot meta '%s': %v", args[0], err)
	}
	idx := findEntry(entries, spec.Key)
	if idx < 0 {
		// Key not found: collect all keys so suggestKey can offer a
		// "did you mean" alternative in the returned error.
		keys := make([]string, len(entries))
		for i, e := range entries {
			keys[i] = e.Key
		}
		return fmt.Errorf("cannot meta '%s': %w", args[0], suggestKey(spec.Key, keys))
	}
	// Pointer into entries so mutations below are persisted by
	// writeStoreFile.
	entry := &entries[idx]

	ttlStr, _ := cmd.Flags().GetString("ttl")
	encryptFlag, _ := cmd.Flags().GetBool("encrypt")
	decryptFlag, _ := cmd.Flags().GetBool("decrypt")
	readonlyFlag, _ := cmd.Flags().GetBool("readonly")
	writableFlag, _ := cmd.Flags().GetBool("writable")
	pinFlag, _ := cmd.Flags().GetBool("pin")
	unpinFlag, _ := cmd.Flags().GetBool("unpin")
	force, _ := cmd.Flags().GetBool("force")

	// Reject contradictory flag pairs before touching anything.
	if encryptFlag && decryptFlag {
		return fmt.Errorf("cannot meta '%s': --encrypt and --decrypt are mutually exclusive", args[0])
	}
	if readonlyFlag && writableFlag {
		return fmt.Errorf("cannot meta '%s': --readonly and --writable are mutually exclusive", args[0])
	}
	if pinFlag && unpinFlag {
		return fmt.Errorf("cannot meta '%s': --pin and --unpin are mutually exclusive", args[0])
	}

	// View mode: no flags set
	isModify := ttlStr != "" || encryptFlag || decryptFlag || readonlyFlag || writableFlag || pinFlag || unpinFlag
	if !isModify {
		expiresStr := "never"
		if entry.ExpiresAt > 0 {
			expiresStr = formatExpiry(entry.ExpiresAt)
		}
		fmt.Fprintf(cmd.OutOrStdout(), " key: %s\n", spec.Full())
		fmt.Fprintf(cmd.OutOrStdout(), " secret: %v\n", entry.Secret)
		fmt.Fprintf(cmd.OutOrStdout(), " writable: %v\n", !entry.ReadOnly)
		fmt.Fprintf(cmd.OutOrStdout(), " pinned: %v\n", entry.Pinned)
		fmt.Fprintf(cmd.OutOrStdout(), " expires: %s\n", expiresStr)
		return nil
	}

	// Read-only enforcement: --readonly and --writable always work without --force,
	// but other modifications on a read-only key require --force.
	if entry.ReadOnly && !force && !readonlyFlag && !writableFlag {
		onlyPinChange := !encryptFlag && !decryptFlag && ttlStr == "" && (pinFlag || unpinFlag)
		if !onlyPinChange {
			return fmt.Errorf("cannot meta '%s': key is read-only", args[0])
		}
	}

	// Modification mode — may need identity for encrypt
	if encryptFlag {
		identity, err = ensureIdentity()
		if err != nil {
			return fmt.Errorf("cannot meta '%s': %v", args[0], err)
		}
	}
	recipients, err := allRecipients(identity)
	if err != nil {
		return fmt.Errorf("cannot meta '%s': %v", args[0], err)
	}

	// changes accumulates human-readable descriptions for the final
	// summary line.
	var changes []string

	if ttlStr != "" {
		expiresAt, err := parseTTLString(ttlStr)
		if err != nil {
			return fmt.Errorf("cannot meta '%s': %v", args[0], err)
		}
		entry.ExpiresAt = expiresAt
		// A zero expiry means the TTL was cleared (presumably the
		// "never" spelling — confirm against parseTTLString).
		if expiresAt == 0 {
			changes = append(changes, "cleared ttl")
		} else {
			changes = append(changes, "set ttl to "+ttlStr)
		}
	}

	if encryptFlag {
		if entry.Secret {
			return fmt.Errorf("cannot meta '%s': already encrypted", args[0])
		}
		if entry.Locked {
			return fmt.Errorf("cannot meta '%s': secret is locked (identity file missing)", args[0])
		}
		entry.Secret = true
		changes = append(changes, "encrypted")
	}

	if decryptFlag {
		if !entry.Secret {
			return fmt.Errorf("cannot meta '%s': not encrypted", args[0])
		}
		if entry.Locked {
			return fmt.Errorf("cannot meta '%s': secret is locked (identity file missing)", args[0])
		}
		entry.Secret = false
		changes = append(changes, "decrypted")
	}

	if readonlyFlag {
		entry.ReadOnly = true
		changes = append(changes, "made readonly")
	}
	if writableFlag {
		entry.ReadOnly = false
		changes = append(changes, "made writable")
	}
	if pinFlag {
		entry.Pinned = true
		changes = append(changes, "pinned")
	}
	if unpinFlag {
		entry.Pinned = false
		changes = append(changes, "unpinned")
	}

	if err := writeStoreFile(p, entries, recipients); err != nil {
		return fmt.Errorf("cannot meta '%s': %v", args[0], err)
	}

	summary := strings.Join(changes, ", ")
	okf("%s %s", summary, spec.Display())
	return autoSync(summary + " " + spec.Display())
}
|
||||||
|
|
||||||
|
// init wires up the meta command's flags and registers it on the root
// command.
func init() {
	metaCmd.Flags().String("ttl", "", "set expiry (e.g. 30m, 2h) or 'never' to clear")
	metaCmd.Flags().BoolP("encrypt", "e", false, "encrypt the value at rest")
	metaCmd.Flags().BoolP("decrypt", "d", false, "decrypt the value (store as plaintext)")
	metaCmd.Flags().Bool("readonly", false, "mark the key as read-only")
	metaCmd.Flags().Bool("writable", false, "clear the read-only flag")
	metaCmd.Flags().Bool("pin", false, "pin the key (sorts to top in list)")
	metaCmd.Flags().Bool("unpin", false, "unpin the key")
	metaCmd.Flags().Bool("force", false, "bypass read-only protection for metadata changes")
	rootCmd.AddCommand(metaCmd)
}
|
||||||
110
cmd/msg.go
Normal file
110
cmd/msg.go
Normal file
|
|
@ -0,0 +1,110 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/term"
|
||||||
|
)
|
||||||
|
|
||||||
|
// hinted wraps an error with an actionable hint shown on a separate line.
type hinted struct {
	err  error  // the underlying error
	hint string // guidance surfaced by printErrorWithHints
}

// Error reports only the wrapped error's message; the hint is displayed
// separately.
func (h hinted) Error() string { return h.err.Error() }

// Unwrap exposes the underlying error to errors.Is/errors.As.
func (h hinted) Unwrap() error { return h.err }

// withHint attaches hint to err; printErrorWithHints later recovers it
// via errors.As.
func withHint(err error, hint string) error {
	return hinted{err: err, hint: hint}
}
|
||||||
|
|
||||||
|
// stderrIsTerminal reports whether stderr is attached to a terminal.
func stderrIsTerminal() bool {
	return term.IsTerminal(int(os.Stderr.Fd()))
}
|
||||||
|
|
||||||
|
// stdoutIsTerminal reports whether stdout is attached to a terminal.
func stdoutIsTerminal() bool {
	return term.IsTerminal(int(os.Stdout.Fd()))
}
|
||||||
|
|
||||||
|
// keyword returns a right-aligned, colored keyword (color only on TTY).
// All keywords are bold except dim (code "2").
//
//	FAIL bold red (stderr)
//	hint dim (stderr)
//	WARN bold yellow (stderr)
//	info bold blue (stderr)
//	ok bold green (stderr)
//	? bold cyan (stdout)
//	> dim (stdout)
func keyword(code, word string, tty bool) string {
	padded := fmt.Sprintf("%4s", word)
	if !tty {
		return padded
	}
	// Everything but the dim code gets the bold attribute prefixed.
	if code != "2" {
		code = "1;" + code
	}
	return "\033[" + code + "m" + padded + "\033[0m"
}
|
||||||
|
|
||||||
|
func printError(err error) {
|
||||||
|
tty := stderrIsTerminal()
|
||||||
|
if tty {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s \033[1m%s\033[0m\n", keyword("31", "FAIL", true), err)
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s %s\n", keyword("31", "FAIL", false), err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func printHint(format string, args ...any) {
|
||||||
|
msg := fmt.Sprintf(format, args...)
|
||||||
|
fmt.Fprintf(os.Stderr, "%s %s\n", keyword("2", "hint", stderrIsTerminal()), msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
func warnf(format string, args ...any) {
|
||||||
|
msg := fmt.Sprintf(format, args...)
|
||||||
|
fmt.Fprintf(os.Stderr, "%s %s\n", keyword("33", "WARN", stderrIsTerminal()), msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
func infof(format string, args ...any) {
|
||||||
|
msg := fmt.Sprintf(format, args...)
|
||||||
|
fmt.Fprintf(os.Stderr, "%s %s\n", keyword("34", "info", stderrIsTerminal()), msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
func okf(format string, args ...any) {
|
||||||
|
msg := fmt.Sprintf(format, args...)
|
||||||
|
fmt.Fprintf(os.Stderr, "%s %s\n", keyword("32", "ok", stderrIsTerminal()), msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
func promptf(format string, args ...any) {
|
||||||
|
msg := fmt.Sprintf(format, args...)
|
||||||
|
fmt.Fprintf(os.Stdout, "%s %s\n", keyword("36", "???", stdoutIsTerminal()), msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
func progressf(format string, args ...any) {
|
||||||
|
msg := fmt.Sprintf(format, args...)
|
||||||
|
fmt.Fprintf(os.Stdout, "%s %s\n", keyword("2", ">", stdoutIsTerminal()), msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
// scanln prints a dim "==>" prompt to stdout and reads one whitespace-free
// line into dest, returning fmt.Scanln's error (e.g. on EOF or empty input).
func scanln(dest *string) error {
	fmt.Fprintf(os.Stdout, "%s ", keyword("2", "==>", stdoutIsTerminal()))
	_, err := fmt.Scanln(dest)
	return err
}
|
||||||
|
|
||||||
|
// printErrorWithHints prints the error and any hints found in the error chain.
// A wrapped `hinted` error contributes its hint text; a wrapped errNotFound
// carrying suggestions contributes a "did you mean" line.
func printErrorWithHints(err error) {
	printError(err)
	var h hinted
	if errors.As(err, &h) {
		printHint("%s", h.hint)
	}
	var nf errNotFound
	if errors.As(err, &nf) && len(nf.suggestions) > 0 {
		printHint("did you mean '%s'?", strings.Join(nf.suggestions, "', '"))
	}
}
|
||||||
133
cmd/mv-db.go
Normal file
133
cmd/mv-db.go
Normal file
|
|
@ -0,0 +1,133 @@
|
||||||
|
/*
|
||||||
|
Copyright © 2025 Lewis Wynne <lew@ily.rs>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
// mvStoreCmd represents the move-store command: renames (or with --copy,
// duplicates) a whole store file. RunE is mvStore below.
var mvStoreCmd = &cobra.Command{
	Use:               "move-store FROM TO",
	Short:             "Rename a store",
	Aliases:           []string{"mvs"},
	Args:              cobra.ExactArgs(2),
	ValidArgsFunction: completeStores,
	RunE:              mvStore,
	SilenceUsage:      true,
}
|
||||||
|
|
||||||
|
func mvStore(cmd *cobra.Command, args []string) error {
|
||||||
|
store := &Store{}
|
||||||
|
|
||||||
|
fromName, err := store.parseDB(args[0], false)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot rename store '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
toName, err := store.parseDB(args[1], false)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot rename store '%s': %v", args[1], err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if fromName == toName {
|
||||||
|
return fmt.Errorf("cannot rename store '%s': source and destination are the same", fromName)
|
||||||
|
}
|
||||||
|
|
||||||
|
var notFound errNotFound
|
||||||
|
fromPath, err := store.FindStore(fromName)
|
||||||
|
if errors.As(err, ¬Found) {
|
||||||
|
return fmt.Errorf("cannot rename store '%s': %w", fromName, err)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot rename store '%s': %v", fromName, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
interactive, err := cmd.Flags().GetBool("interactive")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot rename store '%s': %v", fromName, err)
|
||||||
|
}
|
||||||
|
safe, err := cmd.Flags().GetBool("safe")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot rename store '%s': %v", fromName, err)
|
||||||
|
}
|
||||||
|
yes, err := cmd.Flags().GetBool("yes")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot rename store '%s': %v", fromName, err)
|
||||||
|
}
|
||||||
|
promptOverwrite := !yes && (interactive || config.Store.AlwaysPromptOverwrite)
|
||||||
|
|
||||||
|
toPath, err := store.storePath(toName)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot rename store '%s': %v", fromName, err)
|
||||||
|
}
|
||||||
|
if _, err := os.Stat(toPath); err == nil {
|
||||||
|
if safe {
|
||||||
|
infof("skipped '@%s': already exists", toName)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if promptOverwrite {
|
||||||
|
promptf("overwrite store '%s'? (y/n)", toName)
|
||||||
|
var confirm string
|
||||||
|
if err := scanln(&confirm); err != nil {
|
||||||
|
return fmt.Errorf("cannot rename store '%s': %v", fromName, err)
|
||||||
|
}
|
||||||
|
if strings.ToLower(confirm) != "y" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
copy, _ := cmd.Flags().GetBool("copy")
|
||||||
|
var summary string
|
||||||
|
if copy {
|
||||||
|
data, err := os.ReadFile(fromPath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot copy store '%s': %v", fromName, err)
|
||||||
|
}
|
||||||
|
if err := os.WriteFile(toPath, data, 0o640); err != nil {
|
||||||
|
return fmt.Errorf("cannot copy store '%s': %v", fromName, err)
|
||||||
|
}
|
||||||
|
okf("copied @%s to @%s", fromName, toName)
|
||||||
|
summary = fmt.Sprintf("copied @%s to @%s", fromName, toName)
|
||||||
|
} else {
|
||||||
|
if err := os.Rename(fromPath, toPath); err != nil {
|
||||||
|
return fmt.Errorf("cannot rename store '%s': %v", fromName, err)
|
||||||
|
}
|
||||||
|
okf("renamed @%s to @%s", fromName, toName)
|
||||||
|
summary = fmt.Sprintf("moved @%s to @%s", fromName, toName)
|
||||||
|
}
|
||||||
|
return autoSync(summary)
|
||||||
|
}
|
||||||
|
|
||||||
|
// init registers the move-store command and its flags on the root command.
func init() {
	mvStoreCmd.Flags().Bool("copy", false, "copy instead of move (keeps source)")
	mvStoreCmd.Flags().BoolP("interactive", "i", false, "prompt before overwriting destination")
	mvStoreCmd.Flags().BoolP("yes", "y", false, "skip all confirmation prompts")
	mvStoreCmd.Flags().Bool("safe", false, "do not overwrite if the destination store already exists")
	rootCmd.AddCommand(mvStoreCmd)
}
|
||||||
233
cmd/mv.go
233
cmd/mv.go
|
|
@ -26,38 +26,61 @@ import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/dgraph-io/badger/v4"
|
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
var cpCmd = &cobra.Command{
|
var cpCmd = &cobra.Command{
|
||||||
Use: "cp FROM[@DB] TO[@DB]",
|
Use: "copy FROM[@STORE] TO[@STORE]",
|
||||||
Short: "Make a copy of a key.",
|
Aliases: []string{"cp"},
|
||||||
Args: cobra.ExactArgs(2),
|
Short: "Make a copy of a key",
|
||||||
RunE: cp,
|
Args: cobra.ExactArgs(2),
|
||||||
|
ValidArgsFunction: completeKeys,
|
||||||
|
RunE: cp,
|
||||||
|
SilenceUsage: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
var mvCmd = &cobra.Command{
|
var mvCmd = &cobra.Command{
|
||||||
Use: "mv FROM[@DB] TO[@DB]",
|
Use: "move FROM[@STORE] TO[@STORE]",
|
||||||
Short: "Move a key between (or within) databases.",
|
Aliases: []string{"mv"},
|
||||||
Args: cobra.ExactArgs(2),
|
Short: "Move a key",
|
||||||
RunE: mv,
|
Args: cobra.ExactArgs(2),
|
||||||
SilenceUsage: true,
|
ValidArgsFunction: completeKeys,
|
||||||
|
RunE: mv,
|
||||||
|
SilenceUsage: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
// cp implements the copy command: delegates to mvImpl with keepSource
// forced on, so the source key is left in place.
func cp(cmd *cobra.Command, args []string) error {
	return mvImpl(cmd, args, true)
}
||||||
// mv implements the move command: delegates to mvImpl, keeping the source
// only when --copy is set. The flag error is deliberately ignored — the
// flag is registered in init below, so lookup cannot fail.
func mv(cmd *cobra.Command, args []string) error {
	keepSource, _ := cmd.Flags().GetBool("copy")
	return mvImpl(cmd, args, keepSource)
}
|
||||||
|
func mvImpl(cmd *cobra.Command, args []string, keepSource bool) error {
|
||||||
store := &Store{}
|
store := &Store{}
|
||||||
|
|
||||||
interactive, err := cmd.Flags().GetBool("interactive")
|
interactive, err := cmd.Flags().GetBool("interactive")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
promptOverwrite := interactive || config.Key.AlwaysPromptOverwrite
|
safe, err := cmd.Flags().GetBool("safe")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
yes, err := cmd.Flags().GetBool("yes")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
force, _ := cmd.Flags().GetBool("force")
|
||||||
|
promptOverwrite := !yes && (interactive || config.Key.AlwaysPromptOverwrite)
|
||||||
|
|
||||||
|
identity, _ := loadIdentity()
|
||||||
|
recipients, err := allRecipients(identity)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
fromSpec, err := store.parseKey(args[0], true)
|
fromSpec, err := store.parseKey(args[0], true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -68,36 +91,57 @@ func mv(cmd *cobra.Command, args []string) error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
var srcVal []byte
|
// Read source
|
||||||
var srcMeta byte
|
srcPath, err := store.storePath(fromSpec.DB)
|
||||||
var srcExpires uint64
|
if err != nil {
|
||||||
fromRef := fromSpec.Full()
|
return fmt.Errorf("cannot move '%s': %v", fromSpec.Key, err)
|
||||||
toRef := toSpec.Full()
|
}
|
||||||
|
srcEntries, err := readStoreFile(srcPath, identity)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot move '%s': %v", fromSpec.Key, err)
|
||||||
|
}
|
||||||
|
srcIdx := findEntry(srcEntries, fromSpec.Key)
|
||||||
|
if srcIdx < 0 {
|
||||||
|
return fmt.Errorf("cannot move '%s': no such key", fromSpec.Key)
|
||||||
|
}
|
||||||
|
srcEntry := srcEntries[srcIdx]
|
||||||
|
|
||||||
var destExists bool
|
// Block moving a read-only source (move removes the source)
|
||||||
if promptOverwrite {
|
if !keepSource && srcEntry.ReadOnly && !force {
|
||||||
existsErr := store.Transaction(TransactionArgs{
|
return fmt.Errorf("cannot move '%s': key is read-only", fromSpec.Key)
|
||||||
key: toRef,
|
}
|
||||||
readonly: true,
|
|
||||||
transact: func(tx *badger.Txn, k []byte) error {
|
sameStore := fromSpec.DB == toSpec.DB
|
||||||
if _, err := tx.Get(k); err == nil {
|
|
||||||
destExists = true
|
// Check destination for overwrite prompt
|
||||||
return nil
|
dstPath := srcPath
|
||||||
} else if err == badger.ErrKeyNotFound {
|
dstEntries := srcEntries
|
||||||
return nil
|
if !sameStore {
|
||||||
}
|
dstPath, err = store.storePath(toSpec.DB)
|
||||||
return err
|
if err != nil {
|
||||||
},
|
return fmt.Errorf("cannot move '%s': %v", fromSpec.Key, err)
|
||||||
})
|
}
|
||||||
if existsErr != nil {
|
dstEntries, err = readStoreFile(dstPath, identity)
|
||||||
return fmt.Errorf("cannot move '%s': %v", fromSpec.Key, existsErr)
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot move '%s': %v", fromSpec.Key, err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if promptOverwrite && destExists {
|
dstIdx := findEntry(dstEntries, toSpec.Key)
|
||||||
|
|
||||||
|
if dstIdx >= 0 && dstEntries[dstIdx].ReadOnly && !force {
|
||||||
|
return fmt.Errorf("cannot overwrite '%s': key is read-only", toSpec.Key)
|
||||||
|
}
|
||||||
|
|
||||||
|
if safe && dstIdx >= 0 {
|
||||||
|
infof("skipped '%s': already exists", toSpec.Display())
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if promptOverwrite && dstIdx >= 0 {
|
||||||
var confirm string
|
var confirm string
|
||||||
fmt.Printf("overwrite '%s'? (y/n)\n", toSpec.Display())
|
promptf("overwrite '%s'? (y/n)", toSpec.Display())
|
||||||
if _, err := fmt.Scanln(&confirm); err != nil {
|
if err := scanln(&confirm); err != nil {
|
||||||
return fmt.Errorf("cannot move '%s': %v", fromSpec.Key, err)
|
return fmt.Errorf("cannot move '%s': %v", fromSpec.Key, err)
|
||||||
}
|
}
|
||||||
if strings.ToLower(confirm) != "y" {
|
if strings.ToLower(confirm) != "y" {
|
||||||
|
|
@ -105,68 +149,73 @@ func mv(cmd *cobra.Command, args []string) error {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
readErr := store.Transaction(TransactionArgs{
|
// Write destination entry — preserve metadata
|
||||||
key: fromRef,
|
newEntry := Entry{
|
||||||
readonly: true,
|
Key: toSpec.Key,
|
||||||
transact: func(tx *badger.Txn, k []byte) error {
|
Value: srcEntry.Value,
|
||||||
item, err := tx.Get(k)
|
ExpiresAt: srcEntry.ExpiresAt,
|
||||||
if err != nil {
|
Secret: srcEntry.Secret,
|
||||||
return fmt.Errorf("cannot move '%s': %v", fromSpec.Key, err)
|
Locked: srcEntry.Locked,
|
||||||
|
ReadOnly: srcEntry.ReadOnly,
|
||||||
|
Pinned: srcEntry.Pinned,
|
||||||
|
}
|
||||||
|
|
||||||
|
if sameStore {
|
||||||
|
// Both source and dest in same file
|
||||||
|
if dstIdx >= 0 {
|
||||||
|
dstEntries[dstIdx] = newEntry
|
||||||
|
} else {
|
||||||
|
dstEntries = append(dstEntries, newEntry)
|
||||||
|
}
|
||||||
|
if !keepSource {
|
||||||
|
// Remove source - find it again since indices may have changed
|
||||||
|
idx := findEntry(dstEntries, fromSpec.Key)
|
||||||
|
if idx >= 0 {
|
||||||
|
dstEntries = append(dstEntries[:idx], dstEntries[idx+1:]...)
|
||||||
}
|
}
|
||||||
srcMeta = item.UserMeta()
|
}
|
||||||
srcExpires = item.ExpiresAt()
|
if err := writeStoreFile(dstPath, dstEntries, recipients); err != nil {
|
||||||
return item.Value(func(v []byte) error {
|
return err
|
||||||
srcVal = append(srcVal[:0], v...)
|
}
|
||||||
return nil
|
} else {
|
||||||
})
|
// Different stores
|
||||||
},
|
if dstIdx >= 0 {
|
||||||
})
|
dstEntries[dstIdx] = newEntry
|
||||||
if readErr != nil {
|
} else {
|
||||||
return readErr
|
dstEntries = append(dstEntries, newEntry)
|
||||||
}
|
}
|
||||||
|
if err := writeStoreFile(dstPath, dstEntries, recipients); err != nil {
|
||||||
writeErr := store.Transaction(TransactionArgs{
|
return err
|
||||||
key: toRef,
|
}
|
||||||
readonly: false,
|
if !keepSource {
|
||||||
sync: false,
|
srcEntries = append(srcEntries[:srcIdx], srcEntries[srcIdx+1:]...)
|
||||||
transact: func(tx *badger.Txn, k []byte) error {
|
if err := writeStoreFile(srcPath, srcEntries, recipients); err != nil {
|
||||||
entry := badger.NewEntry(k, srcVal).WithMeta(srcMeta)
|
return err
|
||||||
if srcExpires > 0 {
|
|
||||||
entry.ExpiresAt = srcExpires
|
|
||||||
}
|
}
|
||||||
return tx.SetEntry(entry)
|
}
|
||||||
},
|
|
||||||
})
|
|
||||||
if writeErr != nil {
|
|
||||||
return writeErr
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if copy {
|
var summary string
|
||||||
return autoSync()
|
if keepSource {
|
||||||
|
okf("copied %s to %s", fromSpec.Display(), toSpec.Display())
|
||||||
|
summary = "copied " + fromSpec.Display() + " to " + toSpec.Display()
|
||||||
|
} else {
|
||||||
|
okf("renamed %s to %s", fromSpec.Display(), toSpec.Display())
|
||||||
|
summary = "moved " + fromSpec.Display() + " to " + toSpec.Display()
|
||||||
}
|
}
|
||||||
|
return autoSync(summary)
|
||||||
if err := store.Transaction(TransactionArgs{
|
|
||||||
key: fromRef,
|
|
||||||
readonly: false,
|
|
||||||
sync: false,
|
|
||||||
transact: func(tx *badger.Txn, k []byte) error {
|
|
||||||
return tx.Delete(k)
|
|
||||||
},
|
|
||||||
}); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return autoSync()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var (
|
|
||||||
copy bool = false
|
|
||||||
)
|
|
||||||
|
|
||||||
// init registers the move and copy commands with their (mostly shared)
// flag sets on the root command.
func init() {
	mvCmd.Flags().Bool("copy", false, "copy instead of move (keeps source)")
	mvCmd.Flags().BoolP("interactive", "i", false, "prompt before overwriting destination")
	mvCmd.Flags().BoolP("yes", "y", false, "skip all confirmation prompts")
	mvCmd.Flags().Bool("safe", false, "do not overwrite if the destination already exists")
	mvCmd.Flags().Bool("force", false, "bypass read-only protection")
	rootCmd.AddCommand(mvCmd)
	cpCmd.Flags().BoolP("interactive", "i", false, "prompt before overwriting destination")
	cpCmd.Flags().BoolP("yes", "y", false, "skip all confirmation prompts")
	cpCmd.Flags().Bool("safe", false, "do not overwrite if the destination already exists")
	cpCmd.Flags().Bool("force", false, "bypass read-only protection")
	rootCmd.AddCommand(cpCmd)
}
|
||||||
|
|
|
||||||
243
cmd/ndjson.go
Normal file
243
cmd/ndjson.go
Normal file
|
|
@ -0,0 +1,243 @@
|
||||||
|
/*
|
||||||
|
Copyright © 2025 Lewis Wynne <lew@ily.rs>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"encoding/base64"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"slices"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
"unicode/utf8"
|
||||||
|
|
||||||
|
"filippo.io/age"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Entry is the in-memory representation of a stored key-value pair.
// Value holds plaintext, except when Locked is true, in which case it holds
// the raw ciphertext passed through from disk (see decodeJsonEntry).
type Entry struct {
	Key       string
	Value     []byte
	ExpiresAt uint64 // Unix timestamp; 0 = never expires
	Secret    bool   // encrypted on disk
	Locked    bool   // secret but no identity available to decrypt
	ReadOnly  bool   // cannot be modified without --force
	Pinned    bool   // sorts to top in list output
	StoreName string // populated by list --all
}
|
||||||
|
|
||||||
|
// jsonEntry is the NDJSON on-disk format, one JSON object per line.
// Encoding selects how Value is interpreted: "" or "text" (plain string),
// "base64" (binary), or "secret" (base64 of age ciphertext).
// Optional fields use pointers with omitempty so unset values are omitted
// from the serialized line entirely.
type jsonEntry struct {
	Key       string `json:"key"`
	Value     string `json:"value"`
	Encoding  string `json:"encoding,omitempty"`
	ExpiresAt *int64 `json:"expires_at,omitempty"`
	ReadOnly  *bool  `json:"readonly,omitempty"`
	Pinned    *bool  `json:"pinned,omitempty"`
	Store     string `json:"store,omitempty"`
}
|
||||||
|
|
||||||
|
// readStoreFile reads all non-expired entries from an NDJSON file.
// Returns a nil slice and nil error if the file does not exist.
// If identity is nil, secret entries are returned as locked ciphertext.
// Malformed lines abort the read with a line-numbered error.
func readStoreFile(path string, identity *age.X25519Identity) ([]Entry, error) {
	f, err := os.Open(path)
	if err != nil {
		if os.IsNotExist(err) {
			return nil, nil
		}
		return nil, err
	}
	defer f.Close()

	now := uint64(time.Now().Unix())
	var entries []Entry
	scanner := bufio.NewScanner(f)
	// Raise the scanner's token limit to 8 MiB; the 64 KiB default is too
	// small for large values.
	scanner.Buffer(make([]byte, 0, 64*1024), 8*1024*1024)
	lineNo := 0
	for scanner.Scan() {
		lineNo++
		line := scanner.Bytes()
		if len(line) == 0 {
			// Tolerate blank lines.
			continue
		}
		var je jsonEntry
		if err := json.Unmarshal(line, &je); err != nil {
			return nil, fmt.Errorf("line %d: %w", lineNo, err)
		}
		entry, err := decodeJsonEntry(je, identity)
		if err != nil {
			return nil, fmt.Errorf("line %d: %w", lineNo, err)
		}
		// Skip expired entries
		if entry.ExpiresAt > 0 && entry.ExpiresAt <= now {
			continue
		}
		entries = append(entries, entry)
	}
	return entries, scanner.Err()
}
|
||||||
|
|
||||||
|
// writeStoreFile atomically writes entries to an NDJSON file, sorted by key.
|
||||||
|
// Expired entries are excluded. Empty entry list writes an empty file.
|
||||||
|
// If recipients is empty, secret entries are written as-is (locked passthrough).
|
||||||
|
func writeStoreFile(path string, entries []Entry, recipients []age.Recipient) error {
|
||||||
|
// Sort by key for deterministic output
|
||||||
|
slices.SortFunc(entries, func(a, b Entry) int {
|
||||||
|
return strings.Compare(a.Key, b.Key)
|
||||||
|
})
|
||||||
|
|
||||||
|
tmp := path + ".tmp"
|
||||||
|
f, err := os.Create(tmp)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
f.Close()
|
||||||
|
os.Remove(tmp) // clean up on failure; no-op after successful rename
|
||||||
|
}()
|
||||||
|
|
||||||
|
w := bufio.NewWriter(f)
|
||||||
|
now := uint64(time.Now().Unix())
|
||||||
|
for _, e := range entries {
|
||||||
|
if e.ExpiresAt > 0 && e.ExpiresAt <= now {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
je, err := encodeJsonEntry(e, recipients)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("key '%s': %w", e.Key, err)
|
||||||
|
}
|
||||||
|
data, err := json.Marshal(je)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("key '%s': %w", e.Key, err)
|
||||||
|
}
|
||||||
|
w.Write(data)
|
||||||
|
w.WriteByte('\n')
|
||||||
|
}
|
||||||
|
if err := w.Flush(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := f.Sync(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := f.Close(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return os.Rename(tmp, path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// decodeJsonEntry converts one NDJSON record into an in-memory Entry.
// Secret values are decrypted with identity when possible; if identity is
// nil, or decryption fails, the entry is returned Locked with the raw
// ciphertext in Value (a decryption failure warns rather than aborting the
// whole read). Non-secret values are decoded per the Encoding field.
func decodeJsonEntry(je jsonEntry, identity *age.X25519Identity) (Entry, error) {
	var expiresAt uint64
	if je.ExpiresAt != nil {
		expiresAt = uint64(*je.ExpiresAt)
	}
	readOnly := je.ReadOnly != nil && *je.ReadOnly
	pinned := je.Pinned != nil && *je.Pinned

	if je.Encoding == "secret" {
		ciphertext, err := base64.StdEncoding.DecodeString(je.Value)
		if err != nil {
			return Entry{}, fmt.Errorf("decode secret for '%s': %w", je.Key, err)
		}
		if identity == nil {
			// No identity available: pass the ciphertext through as locked.
			return Entry{Key: je.Key, Value: ciphertext, ExpiresAt: expiresAt, Secret: true, Locked: true, ReadOnly: readOnly, Pinned: pinned}, nil
		}
		plaintext, err := decrypt(ciphertext, identity)
		if err != nil {
			// Warn but keep the entry (locked) so one bad key doesn't fail
			// the whole store.
			warnf("cannot decrypt '%s': %v", je.Key, err)
			return Entry{Key: je.Key, Value: ciphertext, ExpiresAt: expiresAt, Secret: true, Locked: true, ReadOnly: readOnly, Pinned: pinned}, nil
		}
		return Entry{Key: je.Key, Value: plaintext, ExpiresAt: expiresAt, Secret: true, ReadOnly: readOnly, Pinned: pinned}, nil
	}

	var value []byte
	switch je.Encoding {
	case "", "text":
		value = []byte(je.Value)
	case "base64":
		var err error
		value, err = base64.StdEncoding.DecodeString(je.Value)
		if err != nil {
			return Entry{}, fmt.Errorf("decode base64 for '%s': %w", je.Key, err)
		}
	default:
		return Entry{}, fmt.Errorf("unsupported encoding '%s' for '%s'", je.Encoding, je.Key)
	}
	return Entry{Key: je.Key, Value: value, ExpiresAt: expiresAt, ReadOnly: readOnly, Pinned: pinned}, nil
}
|
||||||
|
|
||||||
|
func encodeJsonEntry(e Entry, recipients []age.Recipient) (jsonEntry, error) {
|
||||||
|
je := jsonEntry{Key: e.Key}
|
||||||
|
if e.ExpiresAt > 0 {
|
||||||
|
ts := int64(e.ExpiresAt)
|
||||||
|
je.ExpiresAt = &ts
|
||||||
|
}
|
||||||
|
if e.ReadOnly {
|
||||||
|
t := true
|
||||||
|
je.ReadOnly = &t
|
||||||
|
}
|
||||||
|
if e.Pinned {
|
||||||
|
t := true
|
||||||
|
je.Pinned = &t
|
||||||
|
}
|
||||||
|
|
||||||
|
if e.Secret && e.Locked {
|
||||||
|
// Passthrough: Value holds raw ciphertext, re-encode as-is
|
||||||
|
je.Value = base64.StdEncoding.EncodeToString(e.Value)
|
||||||
|
je.Encoding = "secret"
|
||||||
|
return je, nil
|
||||||
|
}
|
||||||
|
if e.Secret {
|
||||||
|
if len(recipients) == 0 {
|
||||||
|
return je, fmt.Errorf("no recipient available to encrypt")
|
||||||
|
}
|
||||||
|
ciphertext, err := encrypt(e.Value, recipients...)
|
||||||
|
if err != nil {
|
||||||
|
return je, fmt.Errorf("encrypt: %w", err)
|
||||||
|
}
|
||||||
|
je.Value = base64.StdEncoding.EncodeToString(ciphertext)
|
||||||
|
je.Encoding = "secret"
|
||||||
|
return je, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if utf8.Valid(e.Value) {
|
||||||
|
je.Value = string(e.Value)
|
||||||
|
je.Encoding = "text"
|
||||||
|
} else {
|
||||||
|
je.Value = base64.StdEncoding.EncodeToString(e.Value)
|
||||||
|
je.Encoding = "base64"
|
||||||
|
}
|
||||||
|
return je, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// findEntry returns the index of the entry with the given key, or -1.
|
||||||
|
func findEntry(entries []Entry, key string) int {
|
||||||
|
for i, e := range entries {
|
||||||
|
if e.Key == key {
|
||||||
|
return i
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return -1
|
||||||
|
}
|
||||||
139
cmd/ndjson_test.go
Normal file
139
cmd/ndjson_test.go
Normal file
|
|
@ -0,0 +1,139 @@
|
||||||
|
/*
|
||||||
|
Copyright © 2025 Lewis Wynne <lew@ily.rs>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestReadWriteRoundtrip verifies entries written with writeStoreFile come
// back unchanged from readStoreFile, including a binary (non-UTF-8) value
// that must round-trip through base64. The inputs are already key-sorted,
// so index-wise comparison after the (sorting) write is valid.
func TestReadWriteRoundtrip(t *testing.T) {
	dir := t.TempDir()
	path := filepath.Join(dir, "test.ndjson")

	entries := []Entry{
		{Key: "alpha", Value: []byte("hello")},
		{Key: "beta", Value: []byte("world"), ExpiresAt: uint64(time.Now().Add(time.Hour).Unix())},
		{Key: "gamma", Value: []byte{0xff, 0xfe}}, // binary
	}

	if err := writeStoreFile(path, entries, nil); err != nil {
		t.Fatal(err)
	}

	got, err := readStoreFile(path, nil)
	if err != nil {
		t.Fatal(err)
	}

	if len(got) != len(entries) {
		t.Fatalf("got %d entries, want %d", len(got), len(entries))
	}
	for i := range entries {
		if got[i].Key != entries[i].Key {
			t.Errorf("entry %d: key = %q, want %q", i, got[i].Key, entries[i].Key)
		}
		if string(got[i].Value) != string(entries[i].Value) {
			t.Errorf("entry %d: value mismatch", i)
		}
	}
}
|
||||||
|
|
||||||
|
// TestReadStoreFileSkipsExpired verifies expired entries do not come back
// from a write/read cycle.
// NOTE(review): writeStoreFile also filters expired entries, so this likely
// exercises the write-side filter rather than readStoreFile's — confirm by
// writing the expired line to the file directly if read-side coverage is
// intended.
func TestReadStoreFileSkipsExpired(t *testing.T) {
	dir := t.TempDir()
	path := filepath.Join(dir, "test.ndjson")

	entries := []Entry{
		{Key: "alive", Value: []byte("yes")},
		{Key: "dead", Value: []byte("no"), ExpiresAt: 1}, // expired long ago
	}

	if err := writeStoreFile(path, entries, nil); err != nil {
		t.Fatal(err)
	}

	got, err := readStoreFile(path, nil)
	if err != nil {
		t.Fatal(err)
	}

	if len(got) != 1 || got[0].Key != "alive" {
		t.Fatalf("expected only 'alive', got %v", got)
	}
}
|
||||||
|
|
||||||
|
// TestReadStoreFileNotExist verifies a missing file yields an empty result
// with no error (the documented behavior of readStoreFile).
func TestReadStoreFileNotExist(t *testing.T) {
	got, err := readStoreFile("/nonexistent/path.ndjson", nil)
	if err != nil {
		t.Fatal(err)
	}
	if len(got) != 0 {
		t.Fatalf("expected empty, got %d entries", len(got))
	}
}
|
||||||
|
|
||||||
|
func TestWriteStoreFileSortsKeys(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
path := filepath.Join(dir, "test.ndjson")
|
||||||
|
|
||||||
|
entries := []Entry{
|
||||||
|
{Key: "charlie", Value: []byte("3")},
|
||||||
|
{Key: "alpha", Value: []byte("1")},
|
||||||
|
{Key: "bravo", Value: []byte("2")},
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := writeStoreFile(path, entries, nil); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
got, err := readStoreFile(path, nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if got[0].Key != "alpha" || got[1].Key != "bravo" || got[2].Key != "charlie" {
|
||||||
|
t.Fatalf("entries not sorted: %v", got)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestWriteStoreFileAtomic verifies that overwriting a store leaves no
// stray .tmp files behind (the temp file must be renamed into place or
// removed on failure).
func TestWriteStoreFileAtomic(t *testing.T) {
	dir := t.TempDir()
	path := filepath.Join(dir, "test.ndjson")

	// Write initial data
	if err := writeStoreFile(path, []Entry{{Key: "a", Value: []byte("1")}}, nil); err != nil {
		t.Fatal(err)
	}

	// Overwrite — should not leave .tmp files
	if err := writeStoreFile(path, []Entry{{Key: "b", Value: []byte("2")}}, nil); err != nil {
		t.Fatal(err)
	}

	// Check no .tmp file remains. Glob's error is ignored deliberately:
	// it only fails on a malformed pattern, and this one is constant.
	matches, _ := filepath.Glob(filepath.Join(dir, "*.tmp"))
	if len(matches) > 0 {
		t.Fatalf("leftover tmp files: %v", matches)
	}
}
|
||||||
354
cmd/restore.go
354
cmd/restore.go
|
|
@ -24,47 +24,55 @@ package cmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bufio"
|
"bufio"
|
||||||
"encoding/base64"
|
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"os"
|
"os"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/dgraph-io/badger/v4"
|
"filippo.io/age"
|
||||||
|
|
||||||
|
"github.com/gobwas/glob"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
var restoreCmd = &cobra.Command{
|
var restoreCmd = &cobra.Command{
|
||||||
Use: "restore [DB]",
|
Use: "import [STORE]",
|
||||||
Short: "Restore key/value pairs from an NDJSON dump",
|
Short: "Restore key/value pairs from an NDJSON dump",
|
||||||
Aliases: []string{"import"},
|
Aliases: []string{},
|
||||||
Args: cobra.MaximumNArgs(1),
|
Args: cobra.MaximumNArgs(1),
|
||||||
RunE: restore,
|
ValidArgsFunction: completeStores,
|
||||||
SilenceUsage: true,
|
RunE: restore,
|
||||||
|
SilenceUsage: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
func restore(cmd *cobra.Command, args []string) error {
|
func restore(cmd *cobra.Command, args []string) error {
|
||||||
store := &Store{}
|
store := &Store{}
|
||||||
dbName := config.Store.DefaultStoreName
|
explicitStore := len(args) == 1
|
||||||
if len(args) == 1 {
|
targetDB := config.Store.DefaultStoreName
|
||||||
|
if explicitStore {
|
||||||
parsed, err := store.parseDB(args[0], false)
|
parsed, err := store.parseDB(args[0], false)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot restore '%s': %v", args[0], err)
|
return fmt.Errorf("cannot restore '%s': %v", args[0], err)
|
||||||
}
|
}
|
||||||
dbName = parsed
|
targetDB = parsed
|
||||||
}
|
}
|
||||||
displayTarget := "@" + dbName
|
displayTarget := "@" + targetDB
|
||||||
|
|
||||||
globPatterns, err := cmd.Flags().GetStringSlice("glob")
|
keyPatterns, err := cmd.Flags().GetStringSlice("key")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
||||||
}
|
}
|
||||||
separators, err := parseGlobSeparators(cmd)
|
matchers, err := compileGlobMatchers(keyPatterns)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
||||||
}
|
}
|
||||||
matchers, err := compileGlobMatchers(globPatterns, separators)
|
|
||||||
|
storePatterns, err := cmd.Flags().GetStringSlice("store")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
||||||
|
}
|
||||||
|
storeMatchers, err := compileGlobMatchers(storePatterns)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
||||||
}
|
}
|
||||||
|
|
@ -77,95 +85,93 @@ func restore(cmd *cobra.Command, args []string) error {
|
||||||
defer closer.Close()
|
defer closer.Close()
|
||||||
}
|
}
|
||||||
|
|
||||||
db, err := store.open(dbName)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
|
||||||
}
|
|
||||||
defer db.Close()
|
|
||||||
|
|
||||||
decoder := json.NewDecoder(bufio.NewReaderSize(reader, 8*1024*1024))
|
decoder := json.NewDecoder(bufio.NewReaderSize(reader, 8*1024*1024))
|
||||||
|
|
||||||
wb := db.NewWriteBatch()
|
|
||||||
defer wb.Cancel()
|
|
||||||
|
|
||||||
interactive, err := cmd.Flags().GetBool("interactive")
|
interactive, err := cmd.Flags().GetBool("interactive")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
||||||
}
|
}
|
||||||
promptOverwrite := interactive || config.Key.AlwaysPromptOverwrite
|
promptOverwrite := interactive || config.Key.AlwaysPromptOverwrite
|
||||||
|
|
||||||
entryNo := 0
|
drop, err := cmd.Flags().GetBool("drop")
|
||||||
var restored int
|
if err != nil {
|
||||||
var matched bool
|
|
||||||
|
|
||||||
for {
|
|
||||||
var entry dumpEntry
|
|
||||||
if err := decoder.Decode(&entry); err != nil {
|
|
||||||
if err == io.EOF {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
return fmt.Errorf("cannot restore '%s': entry %d: %w", displayTarget, entryNo+1, err)
|
|
||||||
}
|
|
||||||
entryNo++
|
|
||||||
if entry.Key == "" {
|
|
||||||
return fmt.Errorf("cannot restore '%s': entry %d: missing key", displayTarget, entryNo)
|
|
||||||
}
|
|
||||||
if !globMatch(matchers, entry.Key) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if promptOverwrite {
|
|
||||||
exists, err := keyExistsInDB(db, entry.Key)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("cannot restore '%s': entry %d: %v", displayTarget, entryNo, err)
|
|
||||||
}
|
|
||||||
if exists {
|
|
||||||
fmt.Printf("overwrite '%s'? (y/n)\n", entry.Key)
|
|
||||||
var confirm string
|
|
||||||
if _, err := fmt.Scanln(&confirm); err != nil {
|
|
||||||
return fmt.Errorf("cannot restore '%s': entry %d: %v", displayTarget, entryNo, err)
|
|
||||||
}
|
|
||||||
if strings.ToLower(confirm) != "y" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
value, err := decodeEntryValue(entry)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("cannot restore '%s': entry %d: %w", displayTarget, entryNo, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
entryMeta := byte(0x0)
|
|
||||||
if entry.Secret {
|
|
||||||
entryMeta = metaSecret
|
|
||||||
}
|
|
||||||
|
|
||||||
writeEntry := badger.NewEntry([]byte(entry.Key), value).WithMeta(entryMeta)
|
|
||||||
if entry.ExpiresAt != nil {
|
|
||||||
if *entry.ExpiresAt < 0 {
|
|
||||||
return fmt.Errorf("cannot restore '%s': entry %d: expires_at must be >= 0", displayTarget, entryNo)
|
|
||||||
}
|
|
||||||
writeEntry.ExpiresAt = uint64(*entry.ExpiresAt)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := wb.SetEntry(writeEntry); err != nil {
|
|
||||||
return fmt.Errorf("cannot restore '%s': entry %d: %w", displayTarget, entryNo, err)
|
|
||||||
}
|
|
||||||
restored++
|
|
||||||
matched = true
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := wb.Flush(); err != nil {
|
|
||||||
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(matchers) > 0 && !matched {
|
identity, _ := loadIdentity()
|
||||||
return fmt.Errorf("cannot restore '%s': No matches for pattern %s", displayTarget, formatGlobPatterns(globPatterns))
|
recipients, err := allRecipients(identity)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Fprintf(cmd.ErrOrStderr(), "Restored %d entries into @%s\n", restored, dbName)
|
var promptReader io.Reader
|
||||||
return autoSync()
|
if promptOverwrite {
|
||||||
|
filePath, _ := cmd.Flags().GetString("file")
|
||||||
|
if strings.TrimSpace(filePath) == "" {
|
||||||
|
tty, err := os.Open("/dev/tty")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot restore '%s': --interactive requires --file (-f) when reading from stdin on this platform", displayTarget)
|
||||||
|
}
|
||||||
|
defer tty.Close()
|
||||||
|
promptReader = tty
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
opts := restoreOpts{
|
||||||
|
matchers: matchers,
|
||||||
|
storeMatchers: storeMatchers,
|
||||||
|
promptOverwrite: promptOverwrite,
|
||||||
|
drop: drop,
|
||||||
|
identity: identity,
|
||||||
|
recipients: recipients,
|
||||||
|
promptReader: promptReader,
|
||||||
|
}
|
||||||
|
|
||||||
|
// When a specific store is given, all entries go there (original behaviour).
|
||||||
|
// Otherwise, route entries to their original store via the "store" field.
|
||||||
|
var summary string
|
||||||
|
if explicitStore {
|
||||||
|
p, err := store.storePath(targetDB)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
||||||
|
}
|
||||||
|
restored, err := restoreEntries(decoder, map[string]string{targetDB: p}, targetDB, opts)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
|
||||||
|
}
|
||||||
|
if err := reportRestoreFilters(displayTarget, restored, matchers, keyPatterns, storeMatchers, storePatterns); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
okf("restored %d entries into @%s", restored, targetDB)
|
||||||
|
summary = fmt.Sprintf("imported %d entries into @%s", restored, targetDB)
|
||||||
|
} else {
|
||||||
|
restored, err := restoreEntries(decoder, nil, targetDB, opts)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot restore: %v", err)
|
||||||
|
}
|
||||||
|
if err := reportRestoreFilters(displayTarget, restored, matchers, keyPatterns, storeMatchers, storePatterns); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
okf("restored %d entries", restored)
|
||||||
|
summary = fmt.Sprintf("imported %d entries", restored)
|
||||||
|
}
|
||||||
|
|
||||||
|
return autoSync(summary)
|
||||||
|
}
|
||||||
|
|
||||||
|
func reportRestoreFilters(displayTarget string, restored int, matchers []glob.Glob, keyPatterns []string, storeMatchers []glob.Glob, storePatterns []string) error {
|
||||||
|
hasFilters := len(matchers) > 0 || len(storeMatchers) > 0
|
||||||
|
if hasFilters && restored == 0 {
|
||||||
|
var parts []string
|
||||||
|
if len(matchers) > 0 {
|
||||||
|
parts = append(parts, fmt.Sprintf("key pattern %s", formatGlobPatterns(keyPatterns)))
|
||||||
|
}
|
||||||
|
if len(storeMatchers) > 0 {
|
||||||
|
parts = append(parts, fmt.Sprintf("store pattern %s", formatGlobPatterns(storePatterns)))
|
||||||
|
}
|
||||||
|
return fmt.Errorf("cannot restore '%s': no matches for %s", displayTarget, strings.Join(parts, " and "))
|
||||||
|
}
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func restoreInput(cmd *cobra.Command) (io.Reader, io.Closer, error) {
|
func restoreInput(cmd *cobra.Command) (io.Reader, io.Closer, error) {
|
||||||
|
|
@ -183,41 +189,143 @@ func restoreInput(cmd *cobra.Command) (io.Reader, io.Closer, error) {
|
||||||
return f, f, nil
|
return f, f, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func decodeEntryValue(entry dumpEntry) ([]byte, error) {
|
type restoreOpts struct {
|
||||||
switch entry.Encoding {
|
matchers []glob.Glob
|
||||||
case "", "text":
|
storeMatchers []glob.Glob
|
||||||
return []byte(entry.Value), nil
|
promptOverwrite bool
|
||||||
case "base64":
|
drop bool
|
||||||
b, err := base64.StdEncoding.DecodeString(entry.Value)
|
identity *age.X25519Identity
|
||||||
if err != nil {
|
recipients []age.Recipient
|
||||||
return nil, err
|
promptReader io.Reader
|
||||||
}
|
}
|
||||||
return b, nil
|
|
||||||
default:
|
// restoreEntries decodes NDJSON entries and writes them to store files.
|
||||||
return nil, fmt.Errorf("unsupported encoding %q", entry.Encoding)
|
// storePaths maps store names to file paths. If nil, entries are routed to
|
||||||
|
// their original store (from the "store" field), falling back to defaultDB.
|
||||||
|
func restoreEntries(decoder *json.Decoder, storePaths map[string]string, defaultDB string, opts restoreOpts) (int, error) {
|
||||||
|
s := &Store{}
|
||||||
|
|
||||||
|
// Per-store accumulator.
|
||||||
|
type storeAcc struct {
|
||||||
|
path string
|
||||||
|
entries []Entry
|
||||||
|
loaded bool
|
||||||
}
|
}
|
||||||
|
stores := make(map[string]*storeAcc)
|
||||||
|
|
||||||
|
getStore := func(dbName string) (*storeAcc, error) {
|
||||||
|
if acc, ok := stores[dbName]; ok {
|
||||||
|
return acc, nil
|
||||||
|
}
|
||||||
|
var p string
|
||||||
|
if storePaths != nil {
|
||||||
|
var ok bool
|
||||||
|
p, ok = storePaths[dbName]
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("unexpected store '%s'", dbName)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
var err error
|
||||||
|
p, err = s.storePath(dbName)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
acc := &storeAcc{path: p}
|
||||||
|
if !opts.drop {
|
||||||
|
existing, err := readStoreFile(p, opts.identity)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
acc.entries = existing
|
||||||
|
}
|
||||||
|
acc.loaded = true
|
||||||
|
stores[dbName] = acc
|
||||||
|
return acc, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
entryNo := 0
|
||||||
|
restored := 0
|
||||||
|
|
||||||
|
for {
|
||||||
|
var je jsonEntry
|
||||||
|
if err := decoder.Decode(&je); err != nil {
|
||||||
|
if err == io.EOF {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
return 0, fmt.Errorf("entry %d: %w", entryNo+1, err)
|
||||||
|
}
|
||||||
|
entryNo++
|
||||||
|
if je.Key == "" {
|
||||||
|
return 0, fmt.Errorf("entry %d: missing key", entryNo)
|
||||||
|
}
|
||||||
|
if !globMatch(opts.matchers, je.Key) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if !globMatch(opts.storeMatchers, je.Store) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine target store.
|
||||||
|
targetDB := defaultDB
|
||||||
|
if storePaths == nil && je.Store != "" {
|
||||||
|
targetDB = je.Store
|
||||||
|
}
|
||||||
|
|
||||||
|
entry, err := decodeJsonEntry(je, opts.identity)
|
||||||
|
if err != nil {
|
||||||
|
return 0, fmt.Errorf("entry %d: %w", entryNo, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
acc, err := getStore(targetDB)
|
||||||
|
if err != nil {
|
||||||
|
return 0, fmt.Errorf("entry %d: %v", entryNo, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
idx := findEntry(acc.entries, entry.Key)
|
||||||
|
|
||||||
|
if opts.promptOverwrite && idx >= 0 {
|
||||||
|
promptf("overwrite '%s'? (y/n)", entry.Key)
|
||||||
|
var confirm string
|
||||||
|
if opts.promptReader != nil {
|
||||||
|
fmt.Fprintf(os.Stdout, "%s ", keyword("2", "==>", stdoutIsTerminal()))
|
||||||
|
if _, err := fmt.Fscanln(opts.promptReader, &confirm); err != nil {
|
||||||
|
return 0, fmt.Errorf("entry %d: %v", entryNo, err)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if err := scanln(&confirm); err != nil {
|
||||||
|
return 0, fmt.Errorf("entry %d: %v", entryNo, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if strings.ToLower(confirm) != "y" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if idx >= 0 {
|
||||||
|
acc.entries[idx] = entry
|
||||||
|
} else {
|
||||||
|
acc.entries = append(acc.entries, entry)
|
||||||
|
}
|
||||||
|
restored++
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, acc := range stores {
|
||||||
|
if restored > 0 || opts.drop {
|
||||||
|
if err := writeStoreFile(acc.path, acc.entries, opts.recipients); err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return restored, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
restoreCmd.Flags().StringP("file", "f", "", "Path to an NDJSON dump (defaults to stdin)")
|
restoreCmd.Flags().StringP("file", "f", "", "path to an NDJSON dump (defaults to stdin)")
|
||||||
restoreCmd.Flags().StringSliceP("glob", "g", nil, "Restore keys matching glob pattern (repeatable)")
|
restoreCmd.Flags().StringSliceP("key", "k", nil, "restore keys matching glob pattern (repeatable)")
|
||||||
restoreCmd.Flags().String("glob-sep", "", fmt.Sprintf("Characters treated as separators for globbing (default %q)", defaultGlobSeparatorsDisplay()))
|
restoreCmd.Flags().StringSliceP("store", "s", nil, "restore entries from stores matching glob pattern (repeatable)")
|
||||||
restoreCmd.Flags().BoolP("interactive", "i", false, "Prompt before overwriting existing keys")
|
restoreCmd.RegisterFlagCompletionFunc("store", completeStoreFlag)
|
||||||
|
restoreCmd.Flags().BoolP("interactive", "i", false, "prompt before overwriting existing keys")
|
||||||
|
restoreCmd.Flags().Bool("drop", false, "drop existing entries before restoring (full replace)")
|
||||||
rootCmd.AddCommand(restoreCmd)
|
rootCmd.AddCommand(restoreCmd)
|
||||||
}
|
}
|
||||||
|
|
||||||
func keyExistsInDB(db *badger.DB, key string) (bool, error) {
|
|
||||||
var exists bool
|
|
||||||
err := db.View(func(tx *badger.Txn) error {
|
|
||||||
_, err := tx.Get([]byte(key))
|
|
||||||
if err == nil {
|
|
||||||
exists = true
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
if err == badger.ErrKeyNotFound {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
})
|
|
||||||
return exists, err
|
|
||||||
}
|
|
||||||
|
|
|
||||||
57
cmd/root.go
57
cmd/root.go
|
|
@ -31,20 +31,65 @@ import (
|
||||||
|
|
||||||
// rootCmd represents the base command when called without any subcommands
|
// rootCmd represents the base command when called without any subcommands
|
||||||
var rootCmd = &cobra.Command{
|
var rootCmd = &cobra.Command{
|
||||||
Use: "pda",
|
Use: "pda",
|
||||||
Short: "A key-value store.",
|
Short: "A key-value store tool",
|
||||||
Long: asciiArt,
|
Long: asciiArt,
|
||||||
|
SilenceErrors: true, // we print errors ourselves
|
||||||
}
|
}
|
||||||
|
|
||||||
func Execute() {
|
func Execute() {
|
||||||
if configErr != nil {
|
if configErr != nil {
|
||||||
fmt.Fprintln(os.Stderr, "failed to load config:", configErr)
|
cmd, _, _ := rootCmd.Find(os.Args[1:])
|
||||||
os.Exit(1)
|
if !configSafeCmd(cmd) {
|
||||||
|
printError(fmt.Errorf("fatal problem: running pda! doctor automatically"))
|
||||||
|
runDoctor(os.Stderr)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
err := rootCmd.Execute()
|
err := rootCmd.Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
printErrorWithHints(err)
|
||||||
os.Exit(1)
|
os.Exit(1)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {}
|
// configSafeCmd reports whether cmd can run with a broken config.
|
||||||
|
// Only non-destructive commands that don't depend on parsed config values.
|
||||||
|
func configSafeCmd(cmd *cobra.Command) bool {
|
||||||
|
return cmd == configEditCmd || cmd == configInitCmd ||
|
||||||
|
cmd == configPathCmd || cmd == doctorCmd
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
rootCmd.AddGroup(&cobra.Group{ID: "keys", Title: "Key commands:"})
|
||||||
|
|
||||||
|
setCmd.GroupID = "keys"
|
||||||
|
getCmd.GroupID = "keys"
|
||||||
|
runCmd.GroupID = "keys"
|
||||||
|
mvCmd.GroupID = "keys"
|
||||||
|
cpCmd.GroupID = "keys"
|
||||||
|
delCmd.GroupID = "keys"
|
||||||
|
listCmd.GroupID = "keys"
|
||||||
|
editCmd.GroupID = "keys"
|
||||||
|
metaCmd.GroupID = "keys"
|
||||||
|
identityCmd.GroupID = "keys"
|
||||||
|
|
||||||
|
rootCmd.AddGroup(&cobra.Group{ID: "stores", Title: "Store commands:"})
|
||||||
|
|
||||||
|
listStoresCmd.GroupID = "stores"
|
||||||
|
delStoreCmd.GroupID = "stores"
|
||||||
|
mvStoreCmd.GroupID = "stores"
|
||||||
|
exportCmd.GroupID = "stores"
|
||||||
|
restoreCmd.GroupID = "stores"
|
||||||
|
|
||||||
|
rootCmd.AddGroup(&cobra.Group{ID: "git", Title: "Git commands:"})
|
||||||
|
|
||||||
|
initCmd.GroupID = "git"
|
||||||
|
syncCmd.GroupID = "git"
|
||||||
|
gitCmd.GroupID = "git"
|
||||||
|
|
||||||
|
rootCmd.AddGroup(&cobra.Group{ID: "env", Title: "Environment commands:"})
|
||||||
|
|
||||||
|
configCmd.GroupID = "env"
|
||||||
|
doctorCmd.GroupID = "env"
|
||||||
|
}
|
||||||
|
|
|
||||||
241
cmd/secret.go
Normal file
241
cmd/secret.go
Normal file
|
|
@ -0,0 +1,241 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"filippo.io/age"
|
||||||
|
gap "github.com/muesli/go-app-paths"
|
||||||
|
)
|
||||||
|
|
||||||
|
// identityPath returns the path to the age identity file,
|
||||||
|
// respecting PDA_DATA the same way Store.path() does.
|
||||||
|
func identityPath() (string, error) {
|
||||||
|
if override := os.Getenv("PDA_DATA"); override != "" {
|
||||||
|
return filepath.Join(override, "identity.txt"), nil
|
||||||
|
}
|
||||||
|
scope := gap.NewScope(gap.User, "pda")
|
||||||
|
dir, err := scope.DataPath("")
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return filepath.Join(dir, "identity.txt"), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// loadIdentity loads the age identity from disk.
|
||||||
|
// Returns (nil, nil) if the identity file does not exist.
|
||||||
|
func loadIdentity() (*age.X25519Identity, error) {
|
||||||
|
path, err := identityPath()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
data, err := os.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
identity, err := age.ParseX25519Identity(string(bytes.TrimSpace(data)))
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("parse identity %s: %w", path, err)
|
||||||
|
}
|
||||||
|
return identity, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ensureIdentity loads an existing identity or generates a new one.
|
||||||
|
// On first creation prints an ok message with the file path.
|
||||||
|
func ensureIdentity() (*age.X25519Identity, error) {
|
||||||
|
id, err := loadIdentity()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if id != nil {
|
||||||
|
return id, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
id, err = age.GenerateX25519Identity()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("generate identity: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
path, err := identityPath()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := os.MkdirAll(filepath.Dir(path), 0o700); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := os.WriteFile(path, []byte(id.String()+"\n"), 0o600); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
okf("created identity at %s", path)
|
||||||
|
return id, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// recipientsPath returns the path to the additional recipients file,
|
||||||
|
// respecting PDA_DATA the same way identityPath does.
|
||||||
|
func recipientsPath() (string, error) {
|
||||||
|
if override := os.Getenv("PDA_DATA"); override != "" {
|
||||||
|
return filepath.Join(override, "recipients.txt"), nil
|
||||||
|
}
|
||||||
|
scope := gap.NewScope(gap.User, "pda")
|
||||||
|
dir, err := scope.DataPath("")
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return filepath.Join(dir, "recipients.txt"), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// loadRecipients loads additional age recipients from disk.
|
||||||
|
// Returns (nil, nil) if the recipients file does not exist.
|
||||||
|
func loadRecipients() ([]*age.X25519Recipient, error) {
|
||||||
|
path, err := recipientsPath()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
f, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
var recipients []*age.X25519Recipient
|
||||||
|
scanner := bufio.NewScanner(f)
|
||||||
|
for scanner.Scan() {
|
||||||
|
line := strings.TrimSpace(scanner.Text())
|
||||||
|
if line == "" || strings.HasPrefix(line, "#") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
r, err := age.ParseX25519Recipient(line)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("parse recipient %q: %w", line, err)
|
||||||
|
}
|
||||||
|
recipients = append(recipients, r)
|
||||||
|
}
|
||||||
|
return recipients, scanner.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// saveRecipients writes the recipients file. If the list is empty,
|
||||||
|
// the file is deleted.
|
||||||
|
func saveRecipients(recipients []*age.X25519Recipient) error {
|
||||||
|
path, err := recipientsPath()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if len(recipients) == 0 {
|
||||||
|
if err := os.Remove(path); err != nil && !os.IsNotExist(err) {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if err := os.MkdirAll(filepath.Dir(path), 0o700); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
var buf bytes.Buffer
|
||||||
|
for _, r := range recipients {
|
||||||
|
fmt.Fprintln(&buf, r.String())
|
||||||
|
}
|
||||||
|
return os.WriteFile(path, buf.Bytes(), 0o600)
|
||||||
|
}
|
||||||
|
|
||||||
|
// allRecipients combines the identity's own recipient with any additional
|
||||||
|
// recipients from the recipients file into a single []age.Recipient slice.
|
||||||
|
// Returns nil if identity is nil and no recipients file exists.
|
||||||
|
func allRecipients(identity *age.X25519Identity) ([]age.Recipient, error) {
|
||||||
|
extra, err := loadRecipients()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if identity == nil && len(extra) == 0 {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
var recipients []age.Recipient
|
||||||
|
if identity != nil {
|
||||||
|
recipients = append(recipients, identity.Recipient())
|
||||||
|
}
|
||||||
|
for _, r := range extra {
|
||||||
|
recipients = append(recipients, r)
|
||||||
|
}
|
||||||
|
return recipients, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// encrypt encrypts plaintext for the given recipients using age.
|
||||||
|
func encrypt(plaintext []byte, recipients ...age.Recipient) ([]byte, error) {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
w, err := age.Encrypt(&buf, recipients...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if _, err := w.Write(plaintext); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := w.Close(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return buf.Bytes(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// reencryptAllStores decrypts all secrets across all stores with the
|
||||||
|
// given identity, then re-encrypts them for the new recipient list.
|
||||||
|
// Returns the count of re-encrypted secrets.
|
||||||
|
func reencryptAllStores(identity *age.X25519Identity, recipients []age.Recipient) (int, error) {
|
||||||
|
store := &Store{}
|
||||||
|
storeNames, err := store.AllStores()
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
count := 0
|
||||||
|
for _, name := range storeNames {
|
||||||
|
p, err := store.storePath(name)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
entries, err := readStoreFile(p, identity)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
hasSecrets := false
|
||||||
|
for _, e := range entries {
|
||||||
|
if e.Secret {
|
||||||
|
if e.Locked {
|
||||||
|
return 0, fmt.Errorf("cannot re-encrypt: secret '%s@%s' is locked (identity cannot decrypt it)", e.Key, name)
|
||||||
|
}
|
||||||
|
hasSecrets = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !hasSecrets {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if err := writeStoreFile(p, entries, recipients); err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
for _, e := range entries {
|
||||||
|
if e.Secret {
|
||||||
|
spec := KeySpec{Key: e.Key, DB: name}
|
||||||
|
okf("re-encrypted %s", spec.Display())
|
||||||
|
count++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return count, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// decrypt decrypts age ciphertext with the given identity.
|
||||||
|
func decrypt(ciphertext []byte, identity *age.X25519Identity) ([]byte, error) {
|
||||||
|
r, err := age.Decrypt(bytes.NewReader(ciphertext), identity)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return io.ReadAll(r)
|
||||||
|
}
|
||||||
463
cmd/secret_test.go
Normal file
463
cmd/secret_test.go
Normal file
|
|
@ -0,0 +1,463 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"filippo.io/age"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestEncryptDecryptRoundtrip(t *testing.T) {
|
||||||
|
id, err := generateTestIdentity(t)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
recipient := id.Recipient()
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
plaintext []byte
|
||||||
|
}{
|
||||||
|
{"simple text", []byte("hello world")},
|
||||||
|
{"empty", []byte("")},
|
||||||
|
{"binary", []byte{0x00, 0xff, 0xfe, 0xfd}},
|
||||||
|
{"large", make([]byte, 64*1024)},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
ciphertext, err := encrypt(tt.plaintext, recipient)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("encrypt: %v", err)
|
||||||
|
}
|
||||||
|
if len(ciphertext) == 0 && len(tt.plaintext) > 0 {
|
||||||
|
t.Fatal("ciphertext is empty for non-empty plaintext")
|
||||||
|
}
|
||||||
|
got, err := decrypt(ciphertext, id)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("decrypt: %v", err)
|
||||||
|
}
|
||||||
|
if string(got) != string(tt.plaintext) {
|
||||||
|
t.Errorf("roundtrip mismatch: got %q, want %q", got, tt.plaintext)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoadIdentityMissing(t *testing.T) {
|
||||||
|
t.Setenv("PDA_DATA", t.TempDir())
|
||||||
|
id, err := loadIdentity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if id != nil {
|
||||||
|
t.Fatal("expected nil identity for missing file")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEnsureIdentityCreatesFile(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
t.Setenv("PDA_DATA", dir)
|
||||||
|
|
||||||
|
id, err := ensureIdentity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if id == nil {
|
||||||
|
t.Fatal("expected non-nil identity")
|
||||||
|
}
|
||||||
|
|
||||||
|
path := filepath.Join(dir, "identity.txt")
|
||||||
|
info, err := os.Stat(path)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("identity file not created: %v", err)
|
||||||
|
}
|
||||||
|
if perm := info.Mode().Perm(); perm != 0o600 {
|
||||||
|
t.Errorf("identity file permissions = %o, want 0600", perm)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Second call should return same identity
|
||||||
|
id2, err := ensureIdentity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if id2.Recipient().String() != id.Recipient().String() {
|
||||||
|
t.Error("second ensureIdentity returned different identity")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEnsureIdentityIdempotent(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
t.Setenv("PDA_DATA", dir)
|
||||||
|
|
||||||
|
id1, err := ensureIdentity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
id2, err := ensureIdentity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if id1.String() != id2.String() {
|
||||||
|
t.Error("ensureIdentity is not idempotent")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSecretEntryRoundtrip(t *testing.T) {
|
||||||
|
id, err := generateTestIdentity(t)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
recipients := []age.Recipient{id.Recipient()}
|
||||||
|
dir := t.TempDir()
|
||||||
|
path := filepath.Join(dir, "test.ndjson")
|
||||||
|
|
||||||
|
entries := []Entry{
|
||||||
|
{Key: "plain", Value: []byte("hello")},
|
||||||
|
{Key: "encrypted", Value: []byte("secret-value"), Secret: true},
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := writeStoreFile(path, entries, recipients); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read with identity — should decrypt
|
||||||
|
got, err := readStoreFile(path, id)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if len(got) != 2 {
|
||||||
|
t.Fatalf("got %d entries, want 2", len(got))
|
||||||
|
}
|
||||||
|
|
||||||
|
plain := got[findEntry(got, "plain")]
|
||||||
|
if string(plain.Value) != "hello" || plain.Secret || plain.Locked {
|
||||||
|
t.Errorf("plain entry unexpected: %+v", plain)
|
||||||
|
}
|
||||||
|
|
||||||
|
secret := got[findEntry(got, "encrypted")]
|
||||||
|
if string(secret.Value) != "secret-value" {
|
||||||
|
t.Errorf("secret value = %q, want %q", secret.Value, "secret-value")
|
||||||
|
}
|
||||||
|
if !secret.Secret {
|
||||||
|
t.Error("secret entry should have Secret=true")
|
||||||
|
}
|
||||||
|
if secret.Locked {
|
||||||
|
t.Error("secret entry should not be locked when identity available")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSecretEntryLockedWithoutIdentity(t *testing.T) {
|
||||||
|
id, err := generateTestIdentity(t)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
recipients := []age.Recipient{id.Recipient()}
|
||||||
|
dir := t.TempDir()
|
||||||
|
path := filepath.Join(dir, "test.ndjson")
|
||||||
|
|
||||||
|
entries := []Entry{
|
||||||
|
{Key: "encrypted", Value: []byte("secret-value"), Secret: true},
|
||||||
|
}
|
||||||
|
if err := writeStoreFile(path, entries, recipients); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read without identity — should be locked
|
||||||
|
got, err := readStoreFile(path, nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if len(got) != 1 {
|
||||||
|
t.Fatalf("got %d entries, want 1", len(got))
|
||||||
|
}
|
||||||
|
if !got[0].Secret || !got[0].Locked {
|
||||||
|
t.Errorf("expected Secret=true, Locked=true, got Secret=%v, Locked=%v", got[0].Secret, got[0].Locked)
|
||||||
|
}
|
||||||
|
if string(got[0].Value) == "secret-value" {
|
||||||
|
t.Error("locked entry should not contain plaintext")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLockedPassthrough(t *testing.T) {
|
||||||
|
id, err := generateTestIdentity(t)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
recipients := []age.Recipient{id.Recipient()}
|
||||||
|
dir := t.TempDir()
|
||||||
|
path := filepath.Join(dir, "test.ndjson")
|
||||||
|
|
||||||
|
// Write with encryption
|
||||||
|
entries := []Entry{
|
||||||
|
{Key: "encrypted", Value: []byte("secret-value"), Secret: true},
|
||||||
|
}
|
||||||
|
if err := writeStoreFile(path, entries, recipients); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read without identity (locked)
|
||||||
|
locked, err := readStoreFile(path, nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write back without identity (passthrough)
|
||||||
|
if err := writeStoreFile(path, locked, nil); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read with identity — should still decrypt
|
||||||
|
got, err := readStoreFile(path, id)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if len(got) != 1 {
|
||||||
|
t.Fatalf("got %d entries, want 1", len(got))
|
||||||
|
}
|
||||||
|
if string(got[0].Value) != "secret-value" {
|
||||||
|
t.Errorf("after passthrough: value = %q, want %q", got[0].Value, "secret-value")
|
||||||
|
}
|
||||||
|
if !got[0].Secret || got[0].Locked {
|
||||||
|
t.Error("entry should be Secret=true, Locked=false after decryption")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMultiRecipientEncryptDecrypt(t *testing.T) {
|
||||||
|
id1, err := age.GenerateX25519Identity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
id2, err := age.GenerateX25519Identity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
recipients := []age.Recipient{id1.Recipient(), id2.Recipient()}
|
||||||
|
plaintext := []byte("shared secret")
|
||||||
|
|
||||||
|
ciphertext, err := encrypt(plaintext, recipients...)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("encrypt: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Both identities should be able to decrypt
|
||||||
|
for i, id := range []*age.X25519Identity{id1, id2} {
|
||||||
|
got, err := decrypt(ciphertext, id)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("identity %d decrypt: %v", i, err)
|
||||||
|
}
|
||||||
|
if string(got) != string(plaintext) {
|
||||||
|
t.Errorf("identity %d: got %q, want %q", i, got, plaintext)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMultiRecipientStoreRoundtrip(t *testing.T) {
|
||||||
|
id1, err := age.GenerateX25519Identity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
id2, err := age.GenerateX25519Identity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
recipients := []age.Recipient{id1.Recipient(), id2.Recipient()}
|
||||||
|
dir := t.TempDir()
|
||||||
|
path := filepath.Join(dir, "test.ndjson")
|
||||||
|
|
||||||
|
entries := []Entry{
|
||||||
|
{Key: "secret", Value: []byte("multi-recipient-value"), Secret: true},
|
||||||
|
}
|
||||||
|
if err := writeStoreFile(path, entries, recipients); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Both identities should decrypt the store
|
||||||
|
for i, id := range []*age.X25519Identity{id1, id2} {
|
||||||
|
got, err := readStoreFile(path, id)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("identity %d read: %v", i, err)
|
||||||
|
}
|
||||||
|
if len(got) != 1 {
|
||||||
|
t.Fatalf("identity %d: got %d entries, want 1", i, len(got))
|
||||||
|
}
|
||||||
|
if string(got[0].Value) != "multi-recipient-value" {
|
||||||
|
t.Errorf("identity %d: value = %q, want %q", i, got[0].Value, "multi-recipient-value")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLoadRecipientsMissing(t *testing.T) {
|
||||||
|
t.Setenv("PDA_DATA", t.TempDir())
|
||||||
|
recipients, err := loadRecipients()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if recipients != nil {
|
||||||
|
t.Fatal("expected nil recipients for missing file")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSaveLoadRecipientsRoundtrip(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
t.Setenv("PDA_DATA", dir)
|
||||||
|
|
||||||
|
id1, err := age.GenerateX25519Identity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
id2, err := age.GenerateX25519Identity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
toSave := []*age.X25519Recipient{id1.Recipient(), id2.Recipient()}
|
||||||
|
if err := saveRecipients(toSave); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check file permissions
|
||||||
|
path := filepath.Join(dir, "recipients.txt")
|
||||||
|
info, err := os.Stat(path)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("recipients file not created: %v", err)
|
||||||
|
}
|
||||||
|
if perm := info.Mode().Perm(); perm != 0o600 {
|
||||||
|
t.Errorf("recipients file permissions = %o, want 0600", perm)
|
||||||
|
}
|
||||||
|
|
||||||
|
loaded, err := loadRecipients()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if len(loaded) != 2 {
|
||||||
|
t.Fatalf("got %d recipients, want 2", len(loaded))
|
||||||
|
}
|
||||||
|
if loaded[0].String() != id1.Recipient().String() {
|
||||||
|
t.Errorf("recipient 0 = %s, want %s", loaded[0], id1.Recipient())
|
||||||
|
}
|
||||||
|
if loaded[1].String() != id2.Recipient().String() {
|
||||||
|
t.Errorf("recipient 1 = %s, want %s", loaded[1], id2.Recipient())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSaveRecipientsEmptyDeletesFile(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
t.Setenv("PDA_DATA", dir)
|
||||||
|
|
||||||
|
// Create a recipients file first
|
||||||
|
id, err := age.GenerateX25519Identity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if err := saveRecipients([]*age.X25519Recipient{id.Recipient()}); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save empty list should delete the file
|
||||||
|
if err := saveRecipients(nil); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
path := filepath.Join(dir, "recipients.txt")
|
||||||
|
if _, err := os.Stat(path); !os.IsNotExist(err) {
|
||||||
|
t.Error("expected recipients file to be deleted")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAllRecipientsNoIdentityNoFile(t *testing.T) {
|
||||||
|
t.Setenv("PDA_DATA", t.TempDir())
|
||||||
|
recipients, err := allRecipients(nil)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if recipients != nil {
|
||||||
|
t.Fatal("expected nil recipients")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAllRecipientsCombines(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
t.Setenv("PDA_DATA", dir)
|
||||||
|
|
||||||
|
id, err := ensureIdentity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
extra, err := age.GenerateX25519Identity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if err := saveRecipients([]*age.X25519Recipient{extra.Recipient()}); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
recipients, err := allRecipients(id)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if len(recipients) != 2 {
|
||||||
|
t.Fatalf("got %d recipients, want 2", len(recipients))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestReencryptAllStores(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
t.Setenv("PDA_DATA", dir)
|
||||||
|
|
||||||
|
id, err := ensureIdentity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write a store with a secret
|
||||||
|
storePath := filepath.Join(dir, "test.ndjson")
|
||||||
|
entries := []Entry{
|
||||||
|
{Key: "plain", Value: []byte("hello")},
|
||||||
|
{Key: "secret", Value: []byte("secret-value"), Secret: true},
|
||||||
|
}
|
||||||
|
if err := writeStoreFile(storePath, entries, []age.Recipient{id.Recipient()}); err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate a second identity and re-encrypt for both
|
||||||
|
id2, err := age.GenerateX25519Identity()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
newRecipients := []age.Recipient{id.Recipient(), id2.Recipient()}
|
||||||
|
|
||||||
|
count, err := reencryptAllStores(id, newRecipients)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
if count != 1 {
|
||||||
|
t.Fatalf("re-encrypted %d secrets, want 1", count)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Both identities should be able to decrypt
|
||||||
|
for i, identity := range []*age.X25519Identity{id, id2} {
|
||||||
|
got, err := readStoreFile(storePath, identity)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("identity %d read: %v", i, err)
|
||||||
|
}
|
||||||
|
idx := findEntry(got, "secret")
|
||||||
|
if idx < 0 {
|
||||||
|
t.Fatalf("identity %d: secret key not found", i)
|
||||||
|
}
|
||||||
|
if string(got[idx].Value) != "secret-value" {
|
||||||
|
t.Errorf("identity %d: value = %q, want %q", i, got[idx].Value, "secret-value")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func generateTestIdentity(t *testing.T) (*age.X25519Identity, error) {
|
||||||
|
t.Helper()
|
||||||
|
dir := t.TempDir()
|
||||||
|
t.Setenv("PDA_DATA", dir)
|
||||||
|
return ensureIdentity()
|
||||||
|
}
|
||||||
161
cmd/set.go
161
cmd/set.go
|
|
@ -25,18 +25,23 @@ package cmd
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
|
"os"
|
||||||
"strings"
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/dgraph-io/badger/v4"
|
"filippo.io/age"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
// setCmd represents the set command
|
// setCmd represents the set command
|
||||||
var setCmd = &cobra.Command{
|
var setCmd = &cobra.Command{
|
||||||
Use: "set KEY[@DB] [VALUE]",
|
Use: "set KEY[@STORE] [VALUE]",
|
||||||
Short: "Set a value for a key by passing VALUE or Stdin. Optionally specify a db.",
|
Short: "Set a key to a given value",
|
||||||
Long: `Set a value for a key by passing VALUE or Stdin. Optionally specify a db.
|
Long: `Set a key to a given value or stdin. Optionally specify a store.
|
||||||
|
|
||||||
|
Pass --encrypt to encrypt the value at rest using age. An identity file
|
||||||
|
is generated automatically on first use.
|
||||||
|
|
||||||
PDA supports parsing Go templates. Actions are delimited with {{ }}.
|
PDA supports parsing Go templates. Actions are delimited with {{ }}.
|
||||||
|
|
||||||
For example:
|
For example:
|
||||||
|
|
@ -45,9 +50,10 @@ For example:
|
||||||
'Hello, {{ default "World" .NAME }}' will default to World if NAME is blank.
|
'Hello, {{ default "World" .NAME }}' will default to World if NAME is blank.
|
||||||
'Hello, {{ require .NAME }}' will error if NAME is blank.
|
'Hello, {{ require .NAME }}' will error if NAME is blank.
|
||||||
'{{ enum .NAME "Alice" "Bob" }}' allows only NAME=Alice or NAME=Bob.`,
|
'{{ enum .NAME "Alice" "Bob" }}' allows only NAME=Alice or NAME=Bob.`,
|
||||||
Aliases: []string{"s"},
|
Aliases: []string{"s"},
|
||||||
Args: cobra.RangeArgs(1, 2),
|
Args: cobra.RangeArgs(1, 2),
|
||||||
RunE: set,
|
ValidArgsFunction: completeKeys,
|
||||||
|
RunE: set,
|
||||||
SilenceUsage: true,
|
SilenceUsage: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -58,17 +64,40 @@ func set(cmd *cobra.Command, args []string) error {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
safe, err := cmd.Flags().GetBool("safe")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
promptOverwrite := interactive || config.Key.AlwaysPromptOverwrite
|
promptOverwrite := interactive || config.Key.AlwaysPromptOverwrite
|
||||||
|
|
||||||
|
secret, err := cmd.Flags().GetBool("encrypt")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
secret = secret || config.Key.AlwaysEncrypt
|
||||||
|
|
||||||
spec, err := store.parseKey(args[0], true)
|
spec, err := store.parseKey(args[0], true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
filePath, err := cmd.Flags().GetString("file")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
|
||||||
var value []byte
|
var value []byte
|
||||||
if len(args) == 2 {
|
switch {
|
||||||
|
case filePath != "" && len(args) == 2:
|
||||||
|
return fmt.Errorf("cannot set '%s': --file and VALUE argument are mutually exclusive", args[0])
|
||||||
|
case filePath != "":
|
||||||
|
value, err = os.ReadFile(filePath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
case len(args) == 2:
|
||||||
value = []byte(args[1])
|
value = []byte(args[1])
|
||||||
} else {
|
default:
|
||||||
bytes, err := io.ReadAll(cmd.InOrStdin())
|
bytes, err := io.ReadAll(cmd.InOrStdin())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
||||||
|
|
@ -76,58 +105,100 @@ func set(cmd *cobra.Command, args []string) error {
|
||||||
value = bytes
|
value = bytes
|
||||||
}
|
}
|
||||||
|
|
||||||
secret, err := cmd.Flags().GetBool("secret")
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
|
||||||
}
|
|
||||||
ttl, err := cmd.Flags().GetDuration("ttl")
|
ttl, err := cmd.Flags().GetDuration("ttl")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if promptOverwrite {
|
// Load or create identity depending on --encrypt flag
|
||||||
exists, err := keyExists(store, spec.Full())
|
var identity *age.X25519Identity
|
||||||
|
if secret {
|
||||||
|
identity, err = ensureIdentity()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
||||||
}
|
}
|
||||||
if exists {
|
} else {
|
||||||
fmt.Printf("overwrite '%s'? (y/n)\n", spec.Display())
|
identity, _ = loadIdentity()
|
||||||
var confirm string
|
}
|
||||||
if _, err := fmt.Scanln(&confirm); err != nil {
|
recipients, err := allRecipients(identity)
|
||||||
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
if err != nil {
|
||||||
}
|
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
||||||
if strings.ToLower(confirm) != "y" {
|
}
|
||||||
return nil
|
|
||||||
}
|
p, err := store.storePath(spec.DB)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
entries, err := readStoreFile(p, identity)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
|
||||||
|
force, _ := cmd.Flags().GetBool("force")
|
||||||
|
|
||||||
|
idx := findEntry(entries, spec.Key)
|
||||||
|
|
||||||
|
if idx >= 0 && entries[idx].ReadOnly && !force {
|
||||||
|
return fmt.Errorf("cannot set '%s': key is read-only", args[0])
|
||||||
|
}
|
||||||
|
|
||||||
|
if safe && idx >= 0 {
|
||||||
|
infof("skipped '%s': already exists", spec.Display())
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Warn if overwriting an encrypted key without --encrypt
|
||||||
|
if idx >= 0 && entries[idx].Secret && !secret {
|
||||||
|
warnf("overwriting encrypted key '%s' as plaintext", spec.Display())
|
||||||
|
printHint("pass --encrypt to keep it encrypted")
|
||||||
|
}
|
||||||
|
|
||||||
|
if promptOverwrite && idx >= 0 {
|
||||||
|
promptf("overwrite '%s'? (y/n)", spec.Display())
|
||||||
|
var confirm string
|
||||||
|
if err := scanln(&confirm); err != nil {
|
||||||
|
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
if strings.ToLower(confirm) != "y" {
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
trans := TransactionArgs{
|
pinFlag, _ := cmd.Flags().GetBool("pin")
|
||||||
key: args[0],
|
readonlyFlag, _ := cmd.Flags().GetBool("readonly")
|
||||||
readonly: false,
|
|
||||||
sync: false,
|
entry := Entry{
|
||||||
transact: func(tx *badger.Txn, k []byte) error {
|
Key: spec.Key,
|
||||||
entry := badger.NewEntry(k, value)
|
Value: value,
|
||||||
if secret {
|
Secret: secret,
|
||||||
entry = entry.WithMeta(metaSecret)
|
ReadOnly: readonlyFlag,
|
||||||
}
|
Pinned: pinFlag,
|
||||||
if ttl != 0 {
|
}
|
||||||
entry = entry.WithTTL(ttl)
|
if ttl != 0 {
|
||||||
}
|
entry.ExpiresAt = uint64(time.Now().Add(ttl).Unix())
|
||||||
return tx.SetEntry(entry)
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := store.Transaction(trans); err != nil {
|
if idx >= 0 {
|
||||||
return err
|
entries[idx] = entry
|
||||||
|
} else {
|
||||||
|
entries = append(entries, entry)
|
||||||
}
|
}
|
||||||
|
|
||||||
return autoSync()
|
if err := writeStoreFile(p, entries, recipients); err != nil {
|
||||||
|
return fmt.Errorf("cannot set '%s': %v", args[0], err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return autoSync("set " + spec.Display())
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
rootCmd.AddCommand(setCmd)
|
rootCmd.AddCommand(setCmd)
|
||||||
setCmd.Flags().Bool("secret", false, "Mark the stored value as a secret")
|
setCmd.Flags().DurationP("ttl", "t", 0, "expire the key after the provided duration (e.g. 24h, 30m)")
|
||||||
setCmd.Flags().DurationP("ttl", "t", 0, "Expire the key after the provided duration (e.g. 24h, 30m)")
|
setCmd.Flags().BoolP("interactive", "i", false, "prompt before overwriting an existing key")
|
||||||
setCmd.Flags().BoolP("interactive", "i", false, "Prompt before overwriting an existing key")
|
setCmd.Flags().BoolP("encrypt", "e", false, "encrypt the value at rest using age")
|
||||||
|
setCmd.Flags().Bool("safe", false, "do not overwrite if the key already exists")
|
||||||
|
setCmd.Flags().Bool("force", false, "bypass read-only protection")
|
||||||
|
setCmd.Flags().Bool("pin", false, "pin the key (sorts to top in list)")
|
||||||
|
setCmd.Flags().Bool("readonly", false, "mark the key as read-only")
|
||||||
|
setCmd.Flags().StringP("file", "f", "", "read value from a file")
|
||||||
}
|
}
|
||||||
|
|
|
||||||
217
cmd/shared.go
217
cmd/shared.go
|
|
@ -23,8 +23,10 @@ THE SOFTWARE.
|
||||||
package cmd
|
package cmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"encoding/base64"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
|
"net/http"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
@ -32,68 +34,21 @@ import (
|
||||||
"unicode/utf8"
|
"unicode/utf8"
|
||||||
|
|
||||||
"github.com/agnivade/levenshtein"
|
"github.com/agnivade/levenshtein"
|
||||||
"github.com/dgraph-io/badger/v4"
|
|
||||||
gap "github.com/muesli/go-app-paths"
|
gap "github.com/muesli/go-app-paths"
|
||||||
"golang.org/x/term"
|
"golang.org/x/term"
|
||||||
)
|
)
|
||||||
|
|
||||||
type errNotFound struct {
|
type errNotFound struct {
|
||||||
|
what string // "key" or "store"
|
||||||
suggestions []string
|
suggestions []string
|
||||||
}
|
}
|
||||||
|
|
||||||
const (
|
|
||||||
metaSecret byte = 0x1
|
|
||||||
)
|
|
||||||
|
|
||||||
func (err errNotFound) Error() string {
|
func (err errNotFound) Error() string {
|
||||||
if len(err.suggestions) == 0 {
|
return fmt.Sprintf("no such %s", err.what)
|
||||||
return "No such key"
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("No such key. Did you mean '%s'?", strings.Join(err.suggestions, ", "))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type Store struct{}
|
type Store struct{}
|
||||||
|
|
||||||
type TransactionArgs struct {
|
|
||||||
key string
|
|
||||||
readonly bool
|
|
||||||
sync bool
|
|
||||||
transact func(tx *badger.Txn, key []byte) error
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Store) Transaction(args TransactionArgs) error {
|
|
||||||
spec, err := s.parseKey(args.key, true)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
db, err := s.open(spec.DB)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer db.Close()
|
|
||||||
|
|
||||||
if args.sync {
|
|
||||||
err = db.Sync()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tx := db.NewTransaction(!args.readonly)
|
|
||||||
defer tx.Discard()
|
|
||||||
|
|
||||||
if err := args.transact(tx, []byte(spec.Key)); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if args.readonly {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return tx.Commit()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Store) Print(pf string, includeBinary bool, vs ...[]byte) {
|
func (s *Store) Print(pf string, includeBinary bool, vs ...[]byte) {
|
||||||
s.PrintTo(os.Stdout, pf, includeBinary, vs...)
|
s.PrintTo(os.Stdout, pf, includeBinary, vs...)
|
||||||
}
|
}
|
||||||
|
|
@ -114,28 +69,72 @@ func (s *Store) FormatBytes(includeBinary bool, v []byte) string {
|
||||||
return s.formatBytes(includeBinary, v)
|
return s.formatBytes(includeBinary, v)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Store) formatBytes(includeBinary bool, v []byte) string {
|
func (s *Store) formatBytes(base64Flag bool, v []byte) string {
|
||||||
tty := term.IsTerminal(int(os.Stdout.Fd()))
|
if !utf8.Valid(v) {
|
||||||
if tty && !includeBinary && !utf8.Valid(v) {
|
tty := term.IsTerminal(int(os.Stdout.Fd()))
|
||||||
return "(omitted binary data)"
|
if !tty {
|
||||||
|
return string(v)
|
||||||
|
}
|
||||||
|
if base64Flag {
|
||||||
|
return base64.StdEncoding.EncodeToString(v)
|
||||||
|
}
|
||||||
|
mime := http.DetectContentType(v)
|
||||||
|
return fmt.Sprintf("(binary: %s, %s)", formatSize(len(v)), mime)
|
||||||
}
|
}
|
||||||
return string(v)
|
return string(v)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func formatSize(n int) string {
|
||||||
|
const (
|
||||||
|
ki = 1024
|
||||||
|
mi = 1024 * ki
|
||||||
|
gi = 1024 * mi
|
||||||
|
)
|
||||||
|
switch {
|
||||||
|
case n < ki:
|
||||||
|
return fmt.Sprintf("%d", n)
|
||||||
|
case n < mi:
|
||||||
|
return fmt.Sprintf("%.1fk", float64(n)/float64(ki))
|
||||||
|
case n < gi:
|
||||||
|
return fmt.Sprintf("%.1fM", float64(n)/float64(mi))
|
||||||
|
default:
|
||||||
|
return fmt.Sprintf("%.1fG", float64(n)/float64(gi))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Store) storePath(name string) (string, error) {
|
||||||
|
if name == "" {
|
||||||
|
name = config.Store.DefaultStoreName
|
||||||
|
}
|
||||||
|
dir, err := s.path()
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
target := filepath.Join(dir, name+".ndjson")
|
||||||
|
if err := ensureSubpath(dir, target); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return target, nil
|
||||||
|
}
|
||||||
|
|
||||||
func (s *Store) AllStores() ([]string, error) {
|
func (s *Store) AllStores() ([]string, error) {
|
||||||
path, err := s.path()
|
dir, err := s.path()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
dirs, err := os.ReadDir(path)
|
entries, err := os.ReadDir(dir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
var stores []string
|
var stores []string
|
||||||
for _, e := range dirs {
|
for _, e := range entries {
|
||||||
if e.IsDir() {
|
if e.IsDir() || filepath.Ext(e.Name()) != ".ndjson" {
|
||||||
stores = append(stores, e.Name())
|
continue
|
||||||
}
|
}
|
||||||
|
stores = append(stores, strings.TrimSuffix(e.Name(), ".ndjson"))
|
||||||
}
|
}
|
||||||
return stores, nil
|
return stores, nil
|
||||||
}
|
}
|
||||||
|
|
@ -145,22 +144,22 @@ func (s *Store) FindStore(k string) (string, error) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
path, err := s.path(n)
|
p, err := s.storePath(n)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
info, statErr := os.Stat(path)
|
_, statErr := os.Stat(p)
|
||||||
if strings.TrimSpace(n) == "" || os.IsNotExist(statErr) || (statErr == nil && !info.IsDir()) {
|
if strings.TrimSpace(n) == "" || os.IsNotExist(statErr) {
|
||||||
suggestions, err := s.suggestStores(n)
|
suggestions, err := s.suggestStores(n)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
return "", errNotFound{suggestions}
|
return "", errNotFound{what: "store", suggestions: suggestions}
|
||||||
}
|
}
|
||||||
if statErr != nil {
|
if statErr != nil {
|
||||||
return "", statErr
|
return "", statErr
|
||||||
}
|
}
|
||||||
return path, nil
|
return p, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Store) parseKey(raw string, defaults bool) (KeySpec, error) {
|
func (s *Store) parseKey(raw string, defaults bool) (KeySpec, error) {
|
||||||
|
|
@ -176,37 +175,22 @@ func (s *Store) parseDB(v string, defaults bool) (string, error) {
|
||||||
if defaults {
|
if defaults {
|
||||||
return config.Store.DefaultStoreName, nil
|
return config.Store.DefaultStoreName, nil
|
||||||
}
|
}
|
||||||
return "", fmt.Errorf("cannot parse db: bad db format, use DB or @DB")
|
return "", fmt.Errorf("cannot parse store: bad store format, use STORE or @STORE")
|
||||||
}
|
}
|
||||||
if err := validateDBName(db); err != nil {
|
if err := validateDBName(db); err != nil {
|
||||||
return "", fmt.Errorf("cannot parse db: %w", err)
|
return "", fmt.Errorf("cannot parse store: %w", err)
|
||||||
}
|
}
|
||||||
return strings.ToLower(db), nil
|
return strings.ToLower(db), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Store) open(name string) (*badger.DB, error) {
|
func (s *Store) path() (string, error) {
|
||||||
if name == "" {
|
|
||||||
name = config.Store.DefaultStoreName
|
|
||||||
}
|
|
||||||
path, err := s.path(name)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return badger.Open(badger.DefaultOptions(path).WithLoggingLevel(badger.ERROR))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Store) path(args ...string) (string, error) {
|
|
||||||
if override := os.Getenv("PDA_DATA"); override != "" {
|
if override := os.Getenv("PDA_DATA"); override != "" {
|
||||||
if err := os.MkdirAll(override, 0o750); err != nil {
|
if err := os.MkdirAll(override, 0o750); err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
target := filepath.Join(append([]string{override}, args...)...)
|
return override, nil
|
||||||
if err := ensureSubpath(override, target); err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
return target, nil
|
|
||||||
}
|
}
|
||||||
scope := gap.NewVendorScope(gap.User, "pda", "stores")
|
scope := gap.NewScope(gap.User, "pda")
|
||||||
dir, err := scope.DataPath("")
|
dir, err := scope.DataPath("")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
|
|
@ -214,11 +198,7 @@ func (s *Store) path(args ...string) (string, error) {
|
||||||
if err := os.MkdirAll(dir, 0o750); err != nil {
|
if err := os.MkdirAll(dir, 0o750); err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
target := filepath.Join(append([]string{dir}, args...)...)
|
return dir, nil
|
||||||
if err := ensureSubpath(dir, target); err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
return target, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Store) suggestStores(target string) ([]string, error) {
|
func (s *Store) suggestStores(target string) ([]string, error) {
|
||||||
|
|
@ -240,6 +220,19 @@ func (s *Store) suggestStores(target string) ([]string, error) {
|
||||||
return suggestions, nil
|
return suggestions, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func suggestKey(target string, keys []string) error {
|
||||||
|
minThreshold := 1
|
||||||
|
maxThreshold := 4
|
||||||
|
threshold := min(max(len(target)/3, minThreshold), maxThreshold)
|
||||||
|
var suggestions []string
|
||||||
|
for _, k := range keys {
|
||||||
|
if levenshtein.ComputeDistance(target, k) <= threshold {
|
||||||
|
suggestions = append(suggestions, k)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return errNotFound{what: "key", suggestions: suggestions}
|
||||||
|
}
|
||||||
|
|
||||||
func ensureSubpath(base, target string) error {
|
func ensureSubpath(base, target string) error {
|
||||||
absBase, err := filepath.Abs(base)
|
absBase, err := filepath.Abs(base)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -262,42 +255,54 @@ func ensureSubpath(base, target string) error {
|
||||||
|
|
||||||
func validateDBName(name string) error {
|
func validateDBName(name string) error {
|
||||||
if strings.ContainsAny(name, `/\~`) {
|
if strings.ContainsAny(name, `/\~`) {
|
||||||
return fmt.Errorf("bad db format, use DB or @DB")
|
return fmt.Errorf("bad store format, use STORE or @STORE")
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func formatExpiry(expiresAt uint64) string {
|
func formatExpiry(expiresAt uint64) string {
|
||||||
if expiresAt == 0 {
|
if expiresAt == 0 {
|
||||||
return "never"
|
return "-"
|
||||||
}
|
}
|
||||||
expiry := time.Unix(int64(expiresAt), 0).UTC()
|
expiry := time.Unix(int64(expiresAt), 0).UTC()
|
||||||
remaining := time.Until(expiry)
|
remaining := time.Until(expiry)
|
||||||
if remaining <= 0 {
|
if remaining <= 0 {
|
||||||
return fmt.Sprintf("%s (expired)", expiry.Format(time.RFC3339))
|
return "expired"
|
||||||
}
|
}
|
||||||
return fmt.Sprintf("%s (in %s)", expiry.Format(time.RFC3339), remaining.Round(time.Second))
|
return remaining.Round(time.Second).String()
|
||||||
}
|
}
|
||||||
|
|
||||||
// Keys returns all keys for the provided database name (or default if empty).
|
// parseTTLString parses a TTL string that is either a duration (e.g. "30m", "2h")
|
||||||
|
// or the special value "never" to clear expiry. Returns the new ExpiresAt value
|
||||||
|
// (0 means no expiry).
|
||||||
|
func parseTTLString(s string) (uint64, error) {
|
||||||
|
if strings.ToLower(s) == "never" {
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
d, err := time.ParseDuration(s)
|
||||||
|
if err != nil {
|
||||||
|
return 0, fmt.Errorf("invalid ttl '%s': expected a duration (e.g. 30m, 2h) or 'never'", s)
|
||||||
|
}
|
||||||
|
if d <= 0 {
|
||||||
|
return 0, fmt.Errorf("invalid ttl '%s': duration must be positive", s)
|
||||||
|
}
|
||||||
|
return uint64(time.Now().Add(d).Unix()), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Keys returns all keys for the provided store name (or default if empty).
|
||||||
// Keys are returned in lowercase to mirror stored key format.
|
// Keys are returned in lowercase to mirror stored key format.
|
||||||
func (s *Store) Keys(dbName string) ([]string, error) {
|
func (s *Store) Keys(dbName string) ([]string, error) {
|
||||||
db, err := s.open(dbName)
|
p, err := s.storePath(dbName)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
defer db.Close()
|
entries, err := readStoreFile(p, nil)
|
||||||
|
if err != nil {
|
||||||
tx := db.NewTransaction(false)
|
return nil, err
|
||||||
defer tx.Discard()
|
}
|
||||||
|
keys := make([]string, len(entries))
|
||||||
it := tx.NewIterator(badger.DefaultIteratorOptions)
|
for i, e := range entries {
|
||||||
defer it.Close()
|
keys[i] = e.Key
|
||||||
|
|
||||||
var keys []string
|
|
||||||
for it.Rewind(); it.Valid(); it.Next() {
|
|
||||||
item := it.Item()
|
|
||||||
keys = append(keys, string(item.Key()))
|
|
||||||
}
|
}
|
||||||
return keys, nil
|
return keys, nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
131
cmd/sync.go
Normal file
131
cmd/sync.go
Normal file
|
|
@ -0,0 +1,131 @@
|
||||||
|
/*
|
||||||
|
Copyright © 2025 Lewis Wynne <lew@ily.rs>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
var syncCmd = &cobra.Command{
|
||||||
|
Use: "sync",
|
||||||
|
Short: "Manually sync your stores with Git",
|
||||||
|
SilenceUsage: true,
|
||||||
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
|
msg, _ := cmd.Flags().GetString("message")
|
||||||
|
return sync(true, msg, "sync")
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
syncCmd.Flags().StringP("message", "m", "", "custom commit message (defaults to timestamp)")
|
||||||
|
rootCmd.AddCommand(syncCmd)
|
||||||
|
}
|
||||||
|
|
||||||
|
func sync(manual bool, customMsg string, summary string) error {
|
||||||
|
repoDir, err := ensureVCSInitialized()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
remoteInfo, err := repoRemoteInfo(repoDir)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit local changes first so nothing is lost.
|
||||||
|
if err := runGit(repoDir, "add", "-A"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
changed, err := repoHasStagedChanges(repoDir)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if changed {
|
||||||
|
msg := customMsg
|
||||||
|
if msg == "" {
|
||||||
|
msg = renderCommitMessage(config.Git.DefaultCommitMessage, summary)
|
||||||
|
if manual {
|
||||||
|
printHint("use -m to set a custom commit message")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err := runGit(repoDir, "commit", "-m", msg); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
} else if manual {
|
||||||
|
okf("no changes to commit")
|
||||||
|
}
|
||||||
|
|
||||||
|
if remoteInfo.Ref == "" {
|
||||||
|
if manual {
|
||||||
|
warnf("no remote configured, skipping push")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fetch remote state.
|
||||||
|
if manual || config.Git.AutoFetch {
|
||||||
|
if err := runGit(repoDir, "fetch", "--prune"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rebase local commits onto remote if behind.
|
||||||
|
ahead, behind, err := repoAheadBehind(repoDir, remoteInfo.Ref)
|
||||||
|
if err != nil {
|
||||||
|
// Remote ref doesn't exist yet (first push).
|
||||||
|
ahead = 1
|
||||||
|
} else if behind > 0 {
|
||||||
|
if err := pullRemote(repoDir, remoteInfo); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
ahead, _, err = repoAheadBehind(repoDir, remoteInfo.Ref)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Push if ahead.
|
||||||
|
if manual || config.Git.AutoPush {
|
||||||
|
if ahead > 0 {
|
||||||
|
return pushRemote(repoDir, remoteInfo)
|
||||||
|
}
|
||||||
|
if manual {
|
||||||
|
okf("nothing to push")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if manual {
|
||||||
|
okf("in sync!")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func autoSync(summary string) error {
|
||||||
|
if !config.Git.AutoCommit {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if _, err := ensureVCSInitialized(); err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return sync(false, "", summary)
|
||||||
|
}
|
||||||
157
cmd/template.go
Normal file
157
cmd/template.go
Normal file
|
|
@ -0,0 +1,157 @@
|
||||||
|
/*
|
||||||
|
Copyright © 2025 Lewis Wynne <lew@ily.rs>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"slices"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"text/template"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// templateFuncMap returns the shared FuncMap used by both value templates
|
||||||
|
// (pda get) and commit message templates.
|
||||||
|
func templateFuncMap() template.FuncMap {
|
||||||
|
return template.FuncMap{
|
||||||
|
"require": func(v any) (string, error) {
|
||||||
|
s := fmt.Sprint(v)
|
||||||
|
if s == "" {
|
||||||
|
return "", fmt.Errorf("required value is missing or empty")
|
||||||
|
}
|
||||||
|
return s, nil
|
||||||
|
},
|
||||||
|
"default": func(def string, v any) string {
|
||||||
|
s := fmt.Sprint(v)
|
||||||
|
if s == "" {
|
||||||
|
return def
|
||||||
|
}
|
||||||
|
return s
|
||||||
|
},
|
||||||
|
"env": os.Getenv,
|
||||||
|
"enum": func(v any, allowed ...string) (string, error) {
|
||||||
|
s := fmt.Sprint(v)
|
||||||
|
if s == "" {
|
||||||
|
return "", fmt.Errorf("enum value is missing or empty")
|
||||||
|
}
|
||||||
|
if slices.Contains(allowed, s) {
|
||||||
|
return s, nil
|
||||||
|
}
|
||||||
|
return "", fmt.Errorf("invalid value '%s', allowed: %v", s, allowed)
|
||||||
|
},
|
||||||
|
"int": func(v any) (int, error) {
|
||||||
|
s := fmt.Sprint(v)
|
||||||
|
i, err := strconv.Atoi(s)
|
||||||
|
if err != nil {
|
||||||
|
return 0, fmt.Errorf("cannot convert to int: %w", err)
|
||||||
|
}
|
||||||
|
return i, nil
|
||||||
|
},
|
||||||
|
"list": func(v any) []string {
|
||||||
|
s := fmt.Sprint(v)
|
||||||
|
if s == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
parts := strings.Split(s, ",")
|
||||||
|
for i := range parts {
|
||||||
|
parts[i] = strings.TrimSpace(parts[i])
|
||||||
|
}
|
||||||
|
return parts
|
||||||
|
},
|
||||||
|
"time": func() string { return time.Now().UTC().Format(time.RFC3339) },
|
||||||
|
"shell": func(command string) (string, error) {
|
||||||
|
sh := os.Getenv("SHELL")
|
||||||
|
if sh == "" {
|
||||||
|
sh = "/bin/sh"
|
||||||
|
}
|
||||||
|
out, err := exec.Command(sh, "-c", command).Output()
|
||||||
|
if err != nil {
|
||||||
|
if exitErr, ok := err.(*exec.ExitError); ok && len(exitErr.Stderr) > 0 {
|
||||||
|
return "", fmt.Errorf("shell %q: %s", command, strings.TrimSpace(string(exitErr.Stderr)))
|
||||||
|
}
|
||||||
|
return "", fmt.Errorf("shell %q: %w", command, err)
|
||||||
|
}
|
||||||
|
return strings.TrimRight(string(out), "\n"), nil
|
||||||
|
},
|
||||||
|
"pda": func(key string) (string, error) {
|
||||||
|
return pdaGet(key, nil)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// cleanTemplateError strips Go template engine internals from function call
|
||||||
|
// errors, returning just the inner error message. Template execution errors
|
||||||
|
// look like: "template: cmd:1:3: executing "cmd" at <func args>: error calling func: <inner>"
|
||||||
|
// We extract just <inner> for cleaner user-facing output.
|
||||||
|
func cleanTemplateError(err error) error {
|
||||||
|
msg := err.Error()
|
||||||
|
const marker = "error calling "
|
||||||
|
if i := strings.Index(msg, marker); i >= 0 {
|
||||||
|
rest := msg[i+len(marker):]
|
||||||
|
if j := strings.Index(rest, ": "); j >= 0 {
|
||||||
|
return fmt.Errorf("%s", rest[j+2:])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
const maxTemplateDepth = 16
|
||||||
|
|
||||||
|
func templateDepth() int {
|
||||||
|
s := os.Getenv("PDA_TEMPLATE_DEPTH")
|
||||||
|
if s == "" {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
n, _ := strconv.Atoi(s)
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func pdaGet(key string, substitutions []string) (string, error) {
|
||||||
|
depth := templateDepth()
|
||||||
|
if depth >= maxTemplateDepth {
|
||||||
|
return "", fmt.Errorf("pda: max template depth (%d) exceeded", maxTemplateDepth)
|
||||||
|
}
|
||||||
|
exe, err := os.Executable()
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("pda: %w", err)
|
||||||
|
}
|
||||||
|
args := append([]string{"get", key}, substitutions...)
|
||||||
|
cmd := exec.Command(exe, args...)
|
||||||
|
cmd.Env = append(os.Environ(), fmt.Sprintf("PDA_TEMPLATE_DEPTH=%d", depth+1))
|
||||||
|
out, err := cmd.Output()
|
||||||
|
if err != nil {
|
||||||
|
if exitErr, ok := err.(*exec.ExitError); ok && len(exitErr.Stderr) > 0 {
|
||||||
|
msg := strings.TrimSpace(string(exitErr.Stderr))
|
||||||
|
msg = strings.TrimPrefix(msg, "FAIL ")
|
||||||
|
if strings.Contains(msg, "max template depth") {
|
||||||
|
return "", fmt.Errorf("pda: max template depth (%d) exceeded (possible circular reference involving %q)", maxTemplateDepth, key)
|
||||||
|
}
|
||||||
|
return "", fmt.Errorf("pda: %s", msg)
|
||||||
|
}
|
||||||
|
return "", fmt.Errorf("pda: %w", err)
|
||||||
|
}
|
||||||
|
return strings.TrimRight(string(out), "\n"), nil
|
||||||
|
}
|
||||||
447
cmd/vcs.go
447
cmd/vcs.go
|
|
@ -1,9 +1,6 @@
|
||||||
package cmd
|
package cmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bufio"
|
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"os"
|
"os"
|
||||||
|
|
@ -11,221 +8,16 @@ import (
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/dgraph-io/badger/v4"
|
|
||||||
gap "github.com/muesli/go-app-paths"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var vcsCmd = &cobra.Command{
|
|
||||||
Use: "vcs",
|
|
||||||
Short: "Version control utilities",
|
|
||||||
}
|
|
||||||
|
|
||||||
var vcsInitCmd = &cobra.Command{
|
|
||||||
Use: "init [remote-url]",
|
|
||||||
Short: "Initialise or fetch a Git repo for version control",
|
|
||||||
SilenceUsage: true,
|
|
||||||
Args: cobra.MaximumNArgs(1),
|
|
||||||
RunE: vcsInit,
|
|
||||||
}
|
|
||||||
|
|
||||||
var vcsSyncCmd = &cobra.Command{
|
|
||||||
Use: "sync",
|
|
||||||
Short: "export, commit, pull, restore, and push changes",
|
|
||||||
SilenceUsage: true,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
return sync(true)
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
func sync(manual bool) error {
|
|
||||||
store := &Store{}
|
|
||||||
repoDir, err := ensureVCSInitialized()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
remoteInfo, err := repoRemoteInfo(repoDir)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
var ahead int
|
|
||||||
if remoteInfo.Ref != "" {
|
|
||||||
if manual || config.Git.AutoFetch {
|
|
||||||
if err := runGit(repoDir, "fetch", "--prune"); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
remoteAhead, behind, err := repoAheadBehind(repoDir, remoteInfo.Ref)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
ahead = remoteAhead
|
|
||||||
if behind > 0 {
|
|
||||||
if ahead > 0 {
|
|
||||||
return fmt.Errorf("repo diverged from remote (ahead %d, behind %d); resolve manually", ahead, behind)
|
|
||||||
}
|
|
||||||
fmt.Printf("remote has %d commit(s) not present locally; discard local changes and pull? (y/n)\n", behind)
|
|
||||||
var confirm string
|
|
||||||
if _, err := fmt.Scanln(&confirm); err != nil {
|
|
||||||
return fmt.Errorf("cannot continue sync: %w", err)
|
|
||||||
}
|
|
||||||
if strings.ToLower(confirm) != "y" {
|
|
||||||
return fmt.Errorf("aborted sync")
|
|
||||||
}
|
|
||||||
dirty, err := repoHasChanges(repoDir)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if dirty {
|
|
||||||
stashMsg := fmt.Sprintf("pda sync: %s", time.Now().UTC().Format(time.RFC3339))
|
|
||||||
if err := runGit(repoDir, "stash", "push", "-u", "-m", stashMsg); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if err := pullRemote(repoDir, remoteInfo); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return restoreAllSnapshots(store, repoDir)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := exportAllStores(store, repoDir); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := runGit(repoDir, "add", storeDirName); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
changed, err := repoHasStagedChanges(repoDir)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
madeCommit := false
|
|
||||||
if !changed {
|
|
||||||
fmt.Println("no changes to commit")
|
|
||||||
} else {
|
|
||||||
msg := fmt.Sprintf("sync: %s", time.Now().UTC().Format(time.RFC3339))
|
|
||||||
if err := runGit(repoDir, "commit", "-m", msg); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
madeCommit = true
|
|
||||||
}
|
|
||||||
if manual || config.Git.AutoPush {
|
|
||||||
if remoteInfo.Ref != "" && (madeCommit || ahead > 0) {
|
|
||||||
return pushRemote(repoDir, remoteInfo)
|
|
||||||
}
|
|
||||||
fmt.Println("no remote configured; skipping push")
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
const storeDirName = "stores"
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
vcsInitCmd.Flags().Bool("clean", false, "Remove existing VCS directory before initialising")
|
|
||||||
vcsCmd.AddCommand(vcsInitCmd)
|
|
||||||
vcsCmd.AddCommand(vcsSyncCmd)
|
|
||||||
rootCmd.AddCommand(vcsCmd)
|
|
||||||
}
|
|
||||||
|
|
||||||
func vcsInit(cmd *cobra.Command, args []string) error {
|
|
||||||
repoDir, err := vcsRepoRoot()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
store := &Store{}
|
|
||||||
|
|
||||||
clean, err := cmd.Flags().GetBool("clean")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if clean {
|
|
||||||
entries, err := os.ReadDir(repoDir)
|
|
||||||
if err == nil && len(entries) > 0 {
|
|
||||||
fmt.Printf("remove existing VCS directory '%s'? (y/n)\n", repoDir)
|
|
||||||
var confirm string
|
|
||||||
if _, err := fmt.Scanln(&confirm); err != nil {
|
|
||||||
return fmt.Errorf("cannot clean vcs dir: %w", err)
|
|
||||||
}
|
|
||||||
if strings.ToLower(confirm) != "y" {
|
|
||||||
return fmt.Errorf("aborted cleaning vcs dir")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if err := os.RemoveAll(repoDir); err != nil {
|
|
||||||
return fmt.Errorf("cannot clean vcs dir: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
dbs, err := store.AllStores()
|
|
||||||
if err == nil && len(dbs) > 0 {
|
|
||||||
fmt.Printf("remove all existing stores? (y/n)\n")
|
|
||||||
var confirm string
|
|
||||||
if _, err := fmt.Scanln(&confirm); err != nil {
|
|
||||||
return fmt.Errorf("cannot clean stores: %w", err)
|
|
||||||
}
|
|
||||||
if strings.ToLower(confirm) != "y" {
|
|
||||||
return fmt.Errorf("aborted cleaning stores")
|
|
||||||
}
|
|
||||||
if err := wipeAllStores(store); err != nil {
|
|
||||||
return fmt.Errorf("cannot clean stores: %w", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if err := os.MkdirAll(filepath.Join(repoDir), 0o750); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
gitDir := filepath.Join(repoDir, ".git")
|
|
||||||
if _, err := os.Stat(gitDir); os.IsNotExist(err) {
|
|
||||||
if len(args) == 1 {
|
|
||||||
remote := args[0]
|
|
||||||
fmt.Printf("running: git clone %s %s\n", remote, repoDir)
|
|
||||||
if err := runGit("", "clone", remote, repoDir); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
fmt.Printf("running: git init\n")
|
|
||||||
if err := runGit(repoDir, "init"); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
fmt.Println("vcs already initialised; use --clean to reinitialise")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := writeGitignore(repoDir); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(args) == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return restoreAllSnapshots(store, repoDir)
|
|
||||||
}
|
|
||||||
|
|
||||||
func vcsRepoRoot() (string, error) {
|
|
||||||
scope := gap.NewVendorScope(gap.User, "pda", "vcs")
|
|
||||||
dir, err := scope.DataPath("")
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
if err := os.MkdirAll(dir, 0o750); err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
return dir, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func ensureVCSInitialized() (string, error) {
|
func ensureVCSInitialized() (string, error) {
|
||||||
repoDir, err := vcsRepoRoot()
|
repoDir, err := (&Store{}).path()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
if _, err := os.Stat(filepath.Join(repoDir, ".git")); err != nil {
|
if _, err := os.Stat(filepath.Join(repoDir, ".git")); err != nil {
|
||||||
if os.IsNotExist(err) {
|
if os.IsNotExist(err) {
|
||||||
return "", fmt.Errorf("vcs repository not initialised; run 'pda vcs init' first")
|
return "", withHint(fmt.Errorf("vcs not initialised"), "run 'pda init' first")
|
||||||
}
|
}
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
|
|
@ -238,10 +30,8 @@ func writeGitignore(repoDir string) error {
|
||||||
content := strings.Join([]string{
|
content := strings.Join([]string{
|
||||||
"# generated by pda",
|
"# generated by pda",
|
||||||
"*",
|
"*",
|
||||||
"!/",
|
|
||||||
"!.gitignore",
|
"!.gitignore",
|
||||||
"!" + storeDirName + "/",
|
"!*.ndjson",
|
||||||
"!" + storeDirName + "/*",
|
|
||||||
"",
|
"",
|
||||||
}, "\n")
|
}, "\n")
|
||||||
if err := os.WriteFile(path, []byte(content), 0o640); err != nil {
|
if err := os.WriteFile(path, []byte(content), 0o640); err != nil {
|
||||||
|
|
@ -251,73 +41,9 @@ func writeGitignore(repoDir string) error {
|
||||||
if err := runGit(repoDir, "add", ".gitignore"); err != nil {
|
if err := runGit(repoDir, "add", ".gitignore"); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
return runGit(repoDir, "commit", "--allow-empty", "-m", "generated gitignore")
|
return runGit(repoDir, "commit", "-m", "generated gitignore")
|
||||||
}
|
}
|
||||||
fmt.Println("Existing .gitignore found.")
|
okf("existing .gitignore found")
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func snapshotDB(store *Store, repoDir, db string) error {
|
|
||||||
targetDir := filepath.Join(repoDir, storeDirName)
|
|
||||||
if err := os.MkdirAll(targetDir, 0o750); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
target := filepath.Join(targetDir, fmt.Sprintf("%s.ndjson", db))
|
|
||||||
f, err := os.Create(target)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer f.Close()
|
|
||||||
|
|
||||||
opts := DumpOptions{
|
|
||||||
Encoding: "auto",
|
|
||||||
IncludeSecret: false,
|
|
||||||
}
|
|
||||||
if err := dumpDatabase(store, db, f, opts); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return f.Sync()
|
|
||||||
}
|
|
||||||
|
|
||||||
// exportAllStores writes every Badger store to ndjson files under repoDir/stores
|
|
||||||
// and removes stale snapshot files for deleted databases.
|
|
||||||
func exportAllStores(store *Store, repoDir string) error {
|
|
||||||
stores, err := store.AllStores()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
targetDir := filepath.Join(repoDir, storeDirName)
|
|
||||||
if err := os.MkdirAll(targetDir, 0o750); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
current := make(map[string]struct{})
|
|
||||||
for _, db := range stores {
|
|
||||||
current[db] = struct{}{}
|
|
||||||
if err := snapshotDB(store, repoDir, db); err != nil {
|
|
||||||
return fmt.Errorf("snapshot %q: %w", db, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
entries, err := os.ReadDir(targetDir)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
for _, e := range entries {
|
|
||||||
if e.IsDir() || filepath.Ext(e.Name()) != ".ndjson" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
dbName := strings.TrimSuffix(e.Name(), ".ndjson")
|
|
||||||
if _, ok := current[dbName]; ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if err := os.Remove(filepath.Join(targetDir, e.Name())); err != nil && !os.IsNotExist(err) {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -387,16 +113,6 @@ func repoAheadBehind(dir, ref string) (int, int, error) {
|
||||||
return ahead, behind, nil
|
return ahead, behind, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func repoHasChanges(dir string) (bool, error) {
|
|
||||||
cmd := exec.Command("git", "status", "--porcelain")
|
|
||||||
cmd.Dir = dir
|
|
||||||
out, err := cmd.Output()
|
|
||||||
if err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
return len(bytes.TrimSpace(out)) > 0, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func repoHasStagedChanges(dir string) (bool, error) {
|
func repoHasStagedChanges(dir string) (bool, error) {
|
||||||
cmd := exec.Command("git", "diff", "--cached", "--quiet")
|
cmd := exec.Command("git", "diff", "--cached", "--quiet")
|
||||||
cmd.Dir = dir
|
cmd.Dir = dir
|
||||||
|
|
@ -412,26 +128,16 @@ func repoHasStagedChanges(dir string) (bool, error) {
|
||||||
|
|
||||||
func pullRemote(dir string, info gitRemoteInfo) error {
|
func pullRemote(dir string, info gitRemoteInfo) error {
|
||||||
if info.HasUpstream {
|
if info.HasUpstream {
|
||||||
return runGit(dir, "pull", "--ff-only")
|
return runGit(dir, "pull", "--rebase")
|
||||||
}
|
}
|
||||||
if info.Remote != "" && info.Branch != "" {
|
return runGit(dir, "pull", "--rebase", info.Remote, info.Branch)
|
||||||
fmt.Printf("running: git pull --ff-only %s %s\n", info.Remote, info.Branch)
|
|
||||||
return runGit(dir, "pull", "--ff-only", info.Remote, info.Branch)
|
|
||||||
}
|
|
||||||
fmt.Println("no remote configured; skipping pull")
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func pushRemote(dir string, info gitRemoteInfo) error {
|
func pushRemote(dir string, info gitRemoteInfo) error {
|
||||||
if info.HasUpstream {
|
if info.HasUpstream {
|
||||||
return runGit(dir, "push")
|
return runGit(dir, "push")
|
||||||
}
|
}
|
||||||
if info.Remote != "" && info.Branch != "" {
|
return runGit(dir, "push", "-u", info.Remote, info.Branch)
|
||||||
fmt.Printf("running: git push -u %s %s\n", info.Remote, info.Branch)
|
|
||||||
return runGit(dir, "push", "-u", info.Remote, info.Branch)
|
|
||||||
}
|
|
||||||
fmt.Println("no remote configured; skipping push")
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func repoHasUpstream(dir string) (bool, error) {
|
func repoHasUpstream(dir string) (bool, error) {
|
||||||
|
|
@ -478,150 +184,19 @@ func currentBranch(dir string) (string, error) {
|
||||||
return branch, nil
|
return branch, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func restoreAllSnapshots(store *Store, repoDir string) error {
|
|
||||||
targetDir := filepath.Join(repoDir, storeDirName)
|
|
||||||
entries, err := os.ReadDir(targetDir)
|
|
||||||
if err != nil {
|
|
||||||
if os.IsNotExist(err) {
|
|
||||||
fmt.Printf("no existing stores found, not restoring")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
snapshotDBs := make(map[string]struct{})
|
|
||||||
|
|
||||||
for _, e := range entries {
|
|
||||||
if e.IsDir() {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if filepath.Ext(e.Name()) != ".ndjson" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
dbName := strings.TrimSuffix(e.Name(), ".ndjson")
|
|
||||||
snapshotDBs[dbName] = struct{}{}
|
|
||||||
|
|
||||||
dbPath, err := store.FindStore(dbName)
|
|
||||||
if err == nil {
|
|
||||||
_ = os.RemoveAll(dbPath)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := restoreSnapshot(store, filepath.Join(targetDir, e.Name()), dbName); err != nil {
|
|
||||||
return fmt.Errorf("restore %q: %w", dbName, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
localDBs, err := store.AllStores()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
for _, db := range localDBs {
|
|
||||||
if _, ok := snapshotDBs[db]; ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
dbPath, err := store.FindStore(db)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err := os.RemoveAll(dbPath); err != nil {
|
|
||||||
return fmt.Errorf("remove db '%s': %w", db, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func wipeAllStores(store *Store) error {
|
func wipeAllStores(store *Store) error {
|
||||||
dbs, err := store.AllStores()
|
dbs, err := store.AllStores()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
for _, db := range dbs {
|
for _, db := range dbs {
|
||||||
path, err := store.FindStore(db)
|
p, err := store.storePath(db)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := os.RemoveAll(path); err != nil {
|
if err := os.Remove(p); err != nil && !os.IsNotExist(err) {
|
||||||
return fmt.Errorf("remove db '%s': %w", db, err)
|
return fmt.Errorf("cannot remove store '%s': %w", db, err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func restoreSnapshot(store *Store, path string, dbName string) error {
|
|
||||||
f, err := os.Open(path)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer f.Close()
|
|
||||||
|
|
||||||
db, err := store.open(dbName)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer db.Close()
|
|
||||||
|
|
||||||
decoder := json.NewDecoder(bufio.NewReader(f))
|
|
||||||
wb := db.NewWriteBatch()
|
|
||||||
defer wb.Cancel()
|
|
||||||
|
|
||||||
entryNo := 0
|
|
||||||
for {
|
|
||||||
var entry dumpEntry
|
|
||||||
if err := decoder.Decode(&entry); err != nil {
|
|
||||||
if err == io.EOF {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
return fmt.Errorf("entry %d: %w", entryNo+1, err)
|
|
||||||
}
|
|
||||||
entryNo++
|
|
||||||
if entry.Key == "" {
|
|
||||||
return fmt.Errorf("entry %d: missing key", entryNo)
|
|
||||||
}
|
|
||||||
|
|
||||||
value, err := decodeEntryValue(entry)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("entry %d: %w", entryNo, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
entryMeta := byte(0x0)
|
|
||||||
if entry.Secret {
|
|
||||||
entryMeta = metaSecret
|
|
||||||
}
|
|
||||||
|
|
||||||
writeEntry := badger.NewEntry([]byte(entry.Key), value).WithMeta(entryMeta)
|
|
||||||
if entry.ExpiresAt != nil {
|
|
||||||
if *entry.ExpiresAt < 0 {
|
|
||||||
return fmt.Errorf("entry %d: expires_at must be >= 0", entryNo)
|
|
||||||
}
|
|
||||||
writeEntry.ExpiresAt = uint64(*entry.ExpiresAt)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := wb.SetEntry(writeEntry); err != nil {
|
|
||||||
return fmt.Errorf("entry %d: %w", entryNo, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := wb.Flush(); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// hasMergeConflicts returns true if there are files with unresolved merge
|
|
||||||
// conflicts in the working tree.
|
|
||||||
func hasMergeConflicts(dir string) (bool, error) {
|
|
||||||
cmd := exec.Command("git", "diff", "--name-only", "--diff-filter=U")
|
|
||||||
cmd.Dir = dir
|
|
||||||
out, err := cmd.Output()
|
|
||||||
if err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
return len(bytes.TrimSpace(out)) > 0, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func autoSync() error {
|
|
||||||
if !config.Git.AutoCommit {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return sync(false)
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -28,7 +28,7 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
version = "pda! 2025.51 release"
|
version = "pda! 2026.14"
|
||||||
)
|
)
|
||||||
|
|
||||||
// versionCmd represents the version command
|
// versionCmd represents the version command
|
||||||
|
|
@ -36,7 +36,8 @@ var versionCmd = &cobra.Command{
|
||||||
Use: "version",
|
Use: "version",
|
||||||
Short: "Display pda! version",
|
Short: "Display pda! version",
|
||||||
Run: func(cmd *cobra.Command, args []string) {
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
if config.DisplayAsciiArt {
|
short, _ := cmd.Flags().GetBool("short")
|
||||||
|
if !short && config.DisplayAsciiArt {
|
||||||
fmt.Print(asciiArt + "\n ")
|
fmt.Print(asciiArt + "\n ")
|
||||||
}
|
}
|
||||||
fmt.Printf("%s\n", version)
|
fmt.Printf("%s\n", version)
|
||||||
|
|
@ -44,5 +45,6 @@ var versionCmd = &cobra.Command{
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
versionCmd.Flags().Bool("short", false, "print only the version string")
|
||||||
rootCmd.AddCommand(versionCmd)
|
rootCmd.AddCommand(versionCmd)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
25
go.mod
25
go.mod
|
|
@ -3,38 +3,27 @@ module github.com/llywelwyn/pda
|
||||||
go 1.25.3
|
go 1.25.3
|
||||||
|
|
||||||
require (
|
require (
|
||||||
|
filippo.io/age v1.3.1
|
||||||
|
github.com/BurntSushi/toml v1.6.0
|
||||||
github.com/agnivade/levenshtein v1.2.1
|
github.com/agnivade/levenshtein v1.2.1
|
||||||
github.com/dgraph-io/badger/v4 v4.8.0
|
|
||||||
github.com/gobwas/glob v0.2.3
|
github.com/gobwas/glob v0.2.3
|
||||||
github.com/google/go-cmdtest v0.4.0
|
github.com/google/go-cmdtest v0.4.0
|
||||||
github.com/jedib0t/go-pretty/v6 v6.7.0
|
github.com/jedib0t/go-pretty/v6 v6.7.0
|
||||||
github.com/muesli/go-app-paths v0.2.2
|
github.com/muesli/go-app-paths v0.2.2
|
||||||
github.com/spf13/cobra v1.10.1
|
github.com/spf13/cobra v1.10.1
|
||||||
golang.org/x/term v0.36.0
|
golang.org/x/term v0.37.0
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/BurntSushi/toml v1.6.0 // indirect
|
filippo.io/hpke v0.4.0 // indirect
|
||||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
|
||||||
github.com/dgraph-io/ristretto/v2 v2.2.0 // indirect
|
|
||||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
|
||||||
github.com/go-logr/logr v1.4.3 // indirect
|
|
||||||
github.com/go-logr/stdr v1.2.2 // indirect
|
|
||||||
github.com/google/flatbuffers v25.2.10+incompatible // indirect
|
|
||||||
github.com/google/go-cmp v0.7.0 // indirect
|
github.com/google/go-cmp v0.7.0 // indirect
|
||||||
github.com/google/renameio v0.1.0 // indirect
|
github.com/google/renameio v0.1.0 // indirect
|
||||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||||
github.com/klauspost/compress v1.18.0 // indirect
|
|
||||||
github.com/mattn/go-runewidth v0.0.16 // indirect
|
github.com/mattn/go-runewidth v0.0.16 // indirect
|
||||||
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
||||||
github.com/rivo/uniseg v0.4.7 // indirect
|
github.com/rivo/uniseg v0.4.7 // indirect
|
||||||
github.com/spf13/pflag v1.0.9 // indirect
|
github.com/spf13/pflag v1.0.9 // indirect
|
||||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
golang.org/x/crypto v0.45.0 // indirect
|
||||||
go.opentelemetry.io/otel v1.37.0 // indirect
|
golang.org/x/sys v0.38.0 // indirect
|
||||||
go.opentelemetry.io/otel/metric v1.37.0 // indirect
|
golang.org/x/text v0.31.0 // indirect
|
||||||
go.opentelemetry.io/otel/trace v1.37.0 // indirect
|
|
||||||
golang.org/x/net v0.41.0 // indirect
|
|
||||||
golang.org/x/sys v0.37.0 // indirect
|
|
||||||
golang.org/x/text v0.26.0 // indirect
|
|
||||||
google.golang.org/protobuf v1.36.6 // indirect
|
|
||||||
)
|
)
|
||||||
|
|
|
||||||
51
go.sum
51
go.sum
|
|
@ -1,33 +1,22 @@
|
||||||
|
c2sp.org/CCTV/age v0.0.0-20251208015420-e9274a7bdbfd h1:ZLsPO6WdZ5zatV4UfVpr7oAwLGRZ+sebTUruuM4Ra3M=
|
||||||
|
c2sp.org/CCTV/age v0.0.0-20251208015420-e9274a7bdbfd/go.mod h1:SrHC2C7r5GkDk8R+NFVzYy/sdj0Ypg9htaPXQq5Cqeo=
|
||||||
|
filippo.io/age v1.3.1 h1:hbzdQOJkuaMEpRCLSN1/C5DX74RPcNCk6oqhKMXmZi0=
|
||||||
|
filippo.io/age v1.3.1/go.mod h1:EZorDTYUxt836i3zdori5IJX/v2Lj6kWFU0cfh6C0D4=
|
||||||
|
filippo.io/hpke v0.4.0 h1:p575VVQ6ted4pL+it6M00V/f2qTZITO0zgmdKCkd5+A=
|
||||||
|
filippo.io/hpke v0.4.0/go.mod h1:EmAN849/P3qdeK+PCMkDpDm83vRHM5cDipBJ8xbQLVY=
|
||||||
github.com/BurntSushi/toml v1.6.0 h1:dRaEfpa2VI55EwlIW72hMRHdWouJeRF7TPYhI+AUQjk=
|
github.com/BurntSushi/toml v1.6.0 h1:dRaEfpa2VI55EwlIW72hMRHdWouJeRF7TPYhI+AUQjk=
|
||||||
github.com/BurntSushi/toml v1.6.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
github.com/BurntSushi/toml v1.6.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
||||||
github.com/agnivade/levenshtein v1.2.1 h1:EHBY3UOn1gwdy/VbFwgo4cxecRznFk7fKWN1KOX7eoM=
|
github.com/agnivade/levenshtein v1.2.1 h1:EHBY3UOn1gwdy/VbFwgo4cxecRznFk7fKWN1KOX7eoM=
|
||||||
github.com/agnivade/levenshtein v1.2.1/go.mod h1:QVVI16kDrtSuwcpd0p1+xMC6Z/VfhtCyDIjcwga4/DU=
|
github.com/agnivade/levenshtein v1.2.1/go.mod h1:QVVI16kDrtSuwcpd0p1+xMC6Z/VfhtCyDIjcwga4/DU=
|
||||||
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
|
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
|
||||||
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
|
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
|
||||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
|
||||||
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/dgraph-io/badger/v4 v4.8.0 h1:JYph1ChBijCw8SLeybvPINizbDKWZ5n/GYbz2yhN/bs=
|
|
||||||
github.com/dgraph-io/badger/v4 v4.8.0/go.mod h1:U6on6e8k/RTbUWxqKR0MvugJuVmkxSNc79ap4917h4w=
|
|
||||||
github.com/dgraph-io/ristretto/v2 v2.2.0 h1:bkY3XzJcXoMuELV8F+vS8kzNgicwQFAaGINAEJdWGOM=
|
|
||||||
github.com/dgraph-io/ristretto/v2 v2.2.0/go.mod h1:RZrm63UmcBAaYWC1DotLYBmTvgkrs0+XhBd7Npn7/zI=
|
|
||||||
github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da h1:aIftn67I1fkbMa512G+w+Pxci9hJPB8oMnkcP3iZF38=
|
|
||||||
github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
|
|
||||||
github.com/dgryski/trifles v0.0.0-20230903005119-f50d829f2e54 h1:SG7nF6SRlWhcT7cNTs5R6Hk4V2lcmLz2NsG2VnInyNo=
|
github.com/dgryski/trifles v0.0.0-20230903005119-f50d829f2e54 h1:SG7nF6SRlWhcT7cNTs5R6Hk4V2lcmLz2NsG2VnInyNo=
|
||||||
github.com/dgryski/trifles v0.0.0-20230903005119-f50d829f2e54/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA=
|
github.com/dgryski/trifles v0.0.0-20230903005119-f50d829f2e54/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA=
|
||||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
|
||||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
|
||||||
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
|
||||||
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
|
|
||||||
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
|
||||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
|
||||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
|
||||||
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
|
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
|
||||||
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
|
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
|
||||||
github.com/google/flatbuffers v25.2.10+incompatible h1:F3vclr7C3HpB1k9mxCGRMXq6FdUalZ6H/pNX4FP1v0Q=
|
|
||||||
github.com/google/flatbuffers v25.2.10+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
|
|
||||||
github.com/google/go-cmdtest v0.4.0 h1:ToXh6W5spLp3npJV92tk6d5hIpUPYEzHLkD+rncbyhI=
|
github.com/google/go-cmdtest v0.4.0 h1:ToXh6W5spLp3npJV92tk6d5hIpUPYEzHLkD+rncbyhI=
|
||||||
github.com/google/go-cmdtest v0.4.0/go.mod h1:apVn/GCasLZUVpAJ6oWAuyP7Ne7CEsQbTnc0plM3m+o=
|
github.com/google/go-cmdtest v0.4.0/go.mod h1:apVn/GCasLZUVpAJ6oWAuyP7Ne7CEsQbTnc0plM3m+o=
|
||||||
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||||
|
|
@ -39,8 +28,6 @@ github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2
|
||||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||||
github.com/jedib0t/go-pretty/v6 v6.7.0 h1:DanoN1RnjXTwDN+B8yqtixXzXqNBCs2Vxo2ARsnrpsY=
|
github.com/jedib0t/go-pretty/v6 v6.7.0 h1:DanoN1RnjXTwDN+B8yqtixXzXqNBCs2Vxo2ARsnrpsY=
|
||||||
github.com/jedib0t/go-pretty/v6 v6.7.0/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU=
|
github.com/jedib0t/go-pretty/v6 v6.7.0/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU=
|
||||||
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
|
||||||
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
|
||||||
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
||||||
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||||
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
|
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
|
||||||
|
|
@ -59,24 +46,14 @@ github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
|
||||||
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||||
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
|
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
|
||||||
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
|
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
|
||||||
go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ=
|
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
|
||||||
go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I=
|
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||||
go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE=
|
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
|
||||||
go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E=
|
golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
|
||||||
go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4=
|
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
|
||||||
go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0=
|
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
|
||||||
golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw=
|
|
||||||
golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA=
|
|
||||||
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
|
|
||||||
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
|
||||||
golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q=
|
|
||||||
golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss=
|
|
||||||
golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
|
|
||||||
golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
|
|
||||||
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
|
|
||||||
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
|
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
|
|
||||||
29
main_test.go
29
main_test.go
|
|
@ -24,18 +24,18 @@ package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"flag"
|
"flag"
|
||||||
|
"os"
|
||||||
"os/exec"
|
"os/exec"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"filippo.io/age"
|
||||||
cmdtest "github.com/google/go-cmdtest"
|
cmdtest "github.com/google/go-cmdtest"
|
||||||
)
|
)
|
||||||
|
|
||||||
var update = flag.Bool("update", false, "update test files with results")
|
var update = flag.Bool("update", false, "update test files with results")
|
||||||
|
|
||||||
func TestMain(t *testing.T) {
|
func TestMain(t *testing.T) {
|
||||||
t.Setenv("PDA_DATA", t.TempDir())
|
|
||||||
t.Setenv("PDA_CONFIG", t.TempDir())
|
|
||||||
ts, err := cmdtest.Read("testdata")
|
ts, err := cmdtest.Read("testdata")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("read testdata: %v", err)
|
t.Fatalf("read testdata: %v", err)
|
||||||
|
|
@ -45,5 +45,30 @@ func TestMain(t *testing.T) {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
ts.Commands["pda"] = cmdtest.Program(bin)
|
ts.Commands["pda"] = cmdtest.Program(bin)
|
||||||
|
|
||||||
|
// Each .ct file gets its own isolated data and config directories
|
||||||
|
// inside its ROOTDIR, so tests cannot leak state to each other.
|
||||||
|
ts.Setup = func(rootDir string) error {
|
||||||
|
dataDir := filepath.Join(rootDir, "data")
|
||||||
|
configDir := filepath.Join(rootDir, "config")
|
||||||
|
if err := os.MkdirAll(dataDir, 0o755); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := os.MkdirAll(configDir, 0o755); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
os.Setenv("PDA_DATA", dataDir)
|
||||||
|
os.Setenv("PDA_CONFIG", configDir)
|
||||||
|
os.Unsetenv("EDITOR")
|
||||||
|
|
||||||
|
// Pre-create an age identity so encryption tests don't print
|
||||||
|
// a creation message with a non-deterministic path.
|
||||||
|
id, err := age.GenerateX25519Identity()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return os.WriteFile(filepath.Join(dataDir, "identity.txt"), []byte(id.String()+"\n"), 0o600)
|
||||||
|
}
|
||||||
|
|
||||||
ts.Run(t, *update)
|
ts.Run(t, *update)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
18
testdata/config-get.ct
vendored
Normal file
18
testdata/config-get.ct
vendored
Normal file
|
|
@ -0,0 +1,18 @@
|
||||||
|
$ pda config get display_ascii_art
|
||||||
|
true
|
||||||
|
|
||||||
|
$ pda config get store.default_store_name
|
||||||
|
store
|
||||||
|
|
||||||
|
$ pda config get git.auto_commit
|
||||||
|
false
|
||||||
|
|
||||||
|
# Unknown key with suggestion (typo)
|
||||||
|
$ pda config get git.auto_comit --> FAIL
|
||||||
|
FAIL unknown config key 'git.auto_comit'
|
||||||
|
hint did you mean 'git.auto_commit'?
|
||||||
|
|
||||||
|
# Unknown key with suggestion (leaf match, no prefix)
|
||||||
|
$ pda config get auto_commit --> FAIL
|
||||||
|
FAIL unknown config key 'auto_commit'
|
||||||
|
hint did you mean 'git.auto_commit'?
|
||||||
30
testdata/config-init.ct
vendored
Normal file
30
testdata/config-init.ct
vendored
Normal file
|
|
@ -0,0 +1,30 @@
|
||||||
|
# Init creates a config file
|
||||||
|
$ pda config init
|
||||||
|
ok generated config: ${ROOTDIR}/config/config.toml
|
||||||
|
|
||||||
|
# Second init fails
|
||||||
|
$ pda config init --> FAIL
|
||||||
|
FAIL config file already exists
|
||||||
|
hint use '--update' to update your config, or '--new' to get a fresh copy
|
||||||
|
|
||||||
|
# Init --new overwrites
|
||||||
|
$ pda config init --new
|
||||||
|
ok generated config: ${ROOTDIR}/config/config.toml
|
||||||
|
|
||||||
|
# --update preserves user changes
|
||||||
|
$ pda config set list.always_show_all_stores false
|
||||||
|
$ pda config get list.always_show_all_stores
|
||||||
|
ok list.always_show_all_stores set to 'false'
|
||||||
|
false
|
||||||
|
$ pda config init --update
|
||||||
|
$ pda config get list.always_show_all_stores
|
||||||
|
ok updated config: ${ROOTDIR}/config/config.toml
|
||||||
|
false
|
||||||
|
|
||||||
|
# --new and --update are mutually exclusive
|
||||||
|
$ pda config init --new --update --> FAIL
|
||||||
|
FAIL --new and --update are mutually exclusive
|
||||||
|
|
||||||
|
# Reset for other tests
|
||||||
|
$ pda config init --new
|
||||||
|
ok generated config: ${ROOTDIR}/config/config.toml
|
||||||
18
testdata/config-list.ct
vendored
Normal file
18
testdata/config-list.ct
vendored
Normal file
|
|
@ -0,0 +1,18 @@
|
||||||
|
$ pda config list
|
||||||
|
display_ascii_art = true
|
||||||
|
key.always_prompt_delete = false
|
||||||
|
key.always_prompt_glob_delete = true
|
||||||
|
key.always_prompt_overwrite = false
|
||||||
|
key.always_encrypt = false
|
||||||
|
store.default_store_name = store
|
||||||
|
store.always_prompt_delete = true
|
||||||
|
store.always_prompt_overwrite = true
|
||||||
|
list.always_show_all_stores = true
|
||||||
|
list.default_list_format = table
|
||||||
|
list.always_show_full_values = false
|
||||||
|
list.always_hide_header = false
|
||||||
|
list.default_columns = meta,size,ttl,store,key,value
|
||||||
|
git.auto_fetch = false
|
||||||
|
git.auto_commit = false
|
||||||
|
git.auto_push = false
|
||||||
|
git.default_commit_message = {{ summary }} {{ time }}
|
||||||
58
testdata/config-set.ct
vendored
Normal file
58
testdata/config-set.ct
vendored
Normal file
|
|
@ -0,0 +1,58 @@
|
||||||
|
# Set a bool value and verify with get
|
||||||
|
$ pda config set git.auto_commit true
|
||||||
|
$ pda config get git.auto_commit
|
||||||
|
ok git.auto_commit set to 'true'
|
||||||
|
true
|
||||||
|
|
||||||
|
# Set a string value
|
||||||
|
$ pda config set store.default_store_name mystore
|
||||||
|
$ pda config get store.default_store_name
|
||||||
|
ok store.default_store_name set to 'mystore'
|
||||||
|
mystore
|
||||||
|
|
||||||
|
# Set back to original
|
||||||
|
$ pda config set git.auto_commit false
|
||||||
|
$ pda config get git.auto_commit
|
||||||
|
ok git.auto_commit set to 'false'
|
||||||
|
false
|
||||||
|
|
||||||
|
# Bad type
|
||||||
|
$ pda config set git.auto_commit yes --> FAIL
|
||||||
|
FAIL cannot set 'git.auto_commit': expected bool (true/false), got 'yes'
|
||||||
|
|
||||||
|
# Invalid list format
|
||||||
|
$ pda config set list.default_list_format yaml --> FAIL
|
||||||
|
FAIL cannot set 'list.default_list_format': must be one of 'table', 'tsv', 'csv', 'html', 'markdown', 'ndjson', or 'json'
|
||||||
|
|
||||||
|
# Valid list format
|
||||||
|
$ pda config set list.default_list_format json
|
||||||
|
$ pda config get list.default_list_format
|
||||||
|
ok list.default_list_format set to 'json'
|
||||||
|
json
|
||||||
|
|
||||||
|
# Invalid list columns
|
||||||
|
$ pda config set list.default_columns foo --> FAIL
|
||||||
|
FAIL cannot set 'list.default_columns': must be a comma-separated list of 'key', 'store', 'value', 'meta', 'size', 'ttl' (got 'foo')
|
||||||
|
|
||||||
|
# Duplicate columns
|
||||||
|
$ pda config set list.default_columns key,key --> FAIL
|
||||||
|
FAIL cannot set 'list.default_columns': duplicate column 'key'
|
||||||
|
|
||||||
|
# Valid list columns
|
||||||
|
$ pda config set list.default_columns key,value
|
||||||
|
$ pda config get list.default_columns
|
||||||
|
ok list.default_columns set to 'key,value'
|
||||||
|
key,value
|
||||||
|
|
||||||
|
# Unknown key
|
||||||
|
$ pda config set git.auto_comit true --> FAIL
|
||||||
|
FAIL unknown config key 'git.auto_comit'
|
||||||
|
hint did you mean 'git.auto_commit'?
|
||||||
|
|
||||||
|
# Reset changed values so subsequent tests see defaults
|
||||||
|
$ pda config set store.default_store_name store
|
||||||
|
$ pda config set list.default_list_format table
|
||||||
|
$ pda config set list.default_columns meta,size,ttl,store,key,value
|
||||||
|
ok store.default_store_name set to 'store'
|
||||||
|
ok list.default_list_format set to 'table'
|
||||||
|
ok list.default_columns set to 'meta,size,ttl,store,key,value'
|
||||||
8
testdata/cp-cross-store.ct
vendored
Normal file
8
testdata/cp-cross-store.ct
vendored
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
# Cross-store copy
|
||||||
|
$ pda set key@src value
|
||||||
|
$ pda cp key@src key@dst
|
||||||
|
ok copied key@src to key@dst
|
||||||
|
$ pda get key@src
|
||||||
|
value
|
||||||
|
$ pda get key@dst
|
||||||
|
value
|
||||||
8
testdata/cp-encrypt.ct
vendored
Normal file
8
testdata/cp-encrypt.ct
vendored
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
# Copy an encrypted key; both keys should decrypt.
|
||||||
|
$ pda set --encrypt secret-key@cpe hidden-value
|
||||||
|
$ pda cp secret-key@cpe copied-key@cpe
|
||||||
|
ok copied secret-key@cpe to copied-key@cpe
|
||||||
|
$ pda get secret-key@cpe
|
||||||
|
hidden-value
|
||||||
|
$ pda get copied-key@cpe
|
||||||
|
hidden-value
|
||||||
3
testdata/cp-missing-err.ct
vendored
Normal file
3
testdata/cp-missing-err.ct
vendored
Normal file
|
|
@ -0,0 +1,3 @@
|
||||||
|
# Copy non-existent key
|
||||||
|
$ pda cp nonexistent dest --> FAIL
|
||||||
|
FAIL cannot move 'nonexistent': no such key
|
||||||
6
testdata/cp-safe.ct
vendored
Normal file
6
testdata/cp-safe.ct
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
||||||
|
$ pda set src@csf hello
|
||||||
|
$ pda set dst@csf existing
|
||||||
|
$ pda cp src@csf dst@csf --safe
|
||||||
|
info skipped 'dst@csf': already exists
|
||||||
|
$ pda get dst@csf
|
||||||
|
existing
|
||||||
8
testdata/cp.ct
vendored
Normal file
8
testdata/cp.ct
vendored
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
# Basic copy
|
||||||
|
$ pda set source@cpok value
|
||||||
|
$ pda cp source@cpok dest@cpok
|
||||||
|
ok copied source@cpok to dest@cpok
|
||||||
|
$ pda get source@cpok
|
||||||
|
value
|
||||||
|
$ pda get dest@cpok
|
||||||
|
value
|
||||||
2
testdata/del-db__err__with__invalid_db.ct
vendored
2
testdata/del-db__err__with__invalid_db.ct
vendored
|
|
@ -1,2 +0,0 @@
|
||||||
$ pda del-db foo/bar --> FAIL
|
|
||||||
Error: cannot delete-db 'foo/bar': cannot parse db: bad db format, use DB or @DB
|
|
||||||
10
testdata/del__dedupe__ok.ct
vendored
10
testdata/del__dedupe__ok.ct
vendored
|
|
@ -1,10 +0,0 @@
|
||||||
$ pda set foo 1
|
|
||||||
$ pda set bar 2
|
|
||||||
$ pda ls
|
|
||||||
bar 2
|
|
||||||
foo 1
|
|
||||||
$ pda del foo --glob "*"
|
|
||||||
$ pda get bar --> FAIL
|
|
||||||
Error: cannot get 'bar': Key not found
|
|
||||||
$ pda get foo --> FAIL
|
|
||||||
Error: cannot get 'foo': Key not found
|
|
||||||
10
testdata/del__glob__mixed__ok.ct
vendored
10
testdata/del__glob__mixed__ok.ct
vendored
|
|
@ -1,10 +0,0 @@
|
||||||
$ pda set foo 1
|
|
||||||
$ pda set bar1 2
|
|
||||||
$ pda set bar2 3
|
|
||||||
$ pda del foo --glob bar*
|
|
||||||
$ pda get foo --> FAIL
|
|
||||||
Error: cannot get 'foo': Key not found
|
|
||||||
$ pda get bar1 --> FAIL
|
|
||||||
Error: cannot get 'bar1': Key not found
|
|
||||||
$ pda get bar2 --> FAIL
|
|
||||||
Error: cannot get 'bar2': Key not found
|
|
||||||
10
testdata/del__glob__ok.ct
vendored
10
testdata/del__glob__ok.ct
vendored
|
|
@ -1,10 +0,0 @@
|
||||||
$ pda set a1 1
|
|
||||||
$ pda set a2 2
|
|
||||||
$ pda set b1 3
|
|
||||||
$ pda del --glob a*
|
|
||||||
$ pda get a1 --> FAIL
|
|
||||||
Error: cannot get 'a1': Key not found
|
|
||||||
$ pda get a2 --> FAIL
|
|
||||||
Error: cannot get 'a2': Key not found
|
|
||||||
$ pda get b1
|
|
||||||
3
|
|
||||||
7
testdata/del__multiple__ok.ct
vendored
7
testdata/del__multiple__ok.ct
vendored
|
|
@ -1,7 +0,0 @@
|
||||||
$ pda set a 1
|
|
||||||
$ pda set b 2
|
|
||||||
$ pda del a b
|
|
||||||
$ pda get a --> FAIL
|
|
||||||
Error: cannot get 'a': Key not found
|
|
||||||
$ pda get b --> FAIL
|
|
||||||
Error: cannot get 'b': Key not found
|
|
||||||
2
testdata/del__ok.ct
vendored
2
testdata/del__ok.ct
vendored
|
|
@ -1,2 +0,0 @@
|
||||||
$ pda set a b
|
|
||||||
$ pda del a
|
|
||||||
8
testdata/dump__glob__ok.ct
vendored
8
testdata/dump__glob__ok.ct
vendored
|
|
@ -1,8 +0,0 @@
|
||||||
$ pda set a1 1
|
|
||||||
$ pda set a2 2
|
|
||||||
$ pda set b1 3
|
|
||||||
$ pda dump --glob a*
|
|
||||||
{"key":"a1","value":"1","encoding":"text"}
|
|
||||||
{"key":"a2","value":"2","encoding":"text"}
|
|
||||||
$ pda dump --glob c* --> FAIL
|
|
||||||
Error: No matches for pattern 'c*'
|
|
||||||
5
testdata/edit-no-editor-err.ct
vendored
Normal file
5
testdata/edit-no-editor-err.ct
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
# Error when EDITOR is not set
|
||||||
|
$ pda set hello@e world
|
||||||
|
$ pda edit hello@e --> FAIL
|
||||||
|
FAIL EDITOR not set
|
||||||
|
hint set $EDITOR to your preferred text editor
|
||||||
8
testdata/export-key-filter.ct
vendored
Normal file
8
testdata/export-key-filter.ct
vendored
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
$ pda set a1@ekf 1
|
||||||
|
$ pda set a2@ekf 2
|
||||||
|
$ pda set b1@ekf 3
|
||||||
|
$ pda export ekf --key "a*"
|
||||||
|
{"key":"a1","value":"1","encoding":"text","store":"ekf"}
|
||||||
|
{"key":"a2","value":"2","encoding":"text","store":"ekf"}
|
||||||
|
$ pda export ekf --key "c*" --> FAIL
|
||||||
|
FAIL cannot ls '@ekf': no matches for key pattern 'c*'
|
||||||
8
testdata/export-value-filter.ct
vendored
Normal file
8
testdata/export-value-filter.ct
vendored
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
$ pda set url@evf https://example.com
|
||||||
|
$ fecho tmpval hello world
|
||||||
|
$ pda set greeting@evf < tmpval
|
||||||
|
$ pda set number@evf 42
|
||||||
|
$ pda export evf --value "**https**"
|
||||||
|
{"key":"url","value":"https://example.com","encoding":"text","store":"evf"}
|
||||||
|
$ pda export evf --value "**world**"
|
||||||
|
{"key":"greeting","value":"hello world\n","encoding":"text","store":"evf"}
|
||||||
6
testdata/export.ct
vendored
Normal file
6
testdata/export.ct
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
||||||
|
# Unfiltered export outputs all entries as NDJSON
|
||||||
|
$ pda set a@exp 1
|
||||||
|
$ pda set b@exp 2
|
||||||
|
$ pda export exp
|
||||||
|
{"key":"a","value":"1","encoding":"text","store":"exp"}
|
||||||
|
{"key":"b","value":"2","encoding":"text","store":"exp"}
|
||||||
4
testdata/get-base64-run.ct
vendored
Normal file
4
testdata/get-base64-run.ct
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
||||||
|
$ fecho cmd echo hello
|
||||||
|
$ pda set foo@gbr < cmd
|
||||||
|
$ pda get foo@gbr --base64 --run
|
||||||
|
hello
|
||||||
3
testdata/get-base64.ct
vendored
Normal file
3
testdata/get-base64.ct
vendored
Normal file
|
|
@ -0,0 +1,3 @@
|
||||||
|
$ pda set a@gb b
|
||||||
|
$ pda get a@gb --base64
|
||||||
|
b
|
||||||
3
testdata/get-exists.ct
vendored
Normal file
3
testdata/get-exists.ct
vendored
Normal file
|
|
@ -0,0 +1,3 @@
|
||||||
|
$ pda set found@ge "hello"
|
||||||
|
$ pda get found@ge --exists
|
||||||
|
$ pda get missing@ge --exists --> FAIL
|
||||||
2
testdata/get-invalid-store-err.ct
vendored
Normal file
2
testdata/get-invalid-store-err.ct
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
$ pda get key@foo/bar --> FAIL
|
||||||
|
FAIL cannot get 'key@foo/bar': bad store format, use STORE or @STORE
|
||||||
14
testdata/get-missing-all-flags-err.ct
vendored
Normal file
14
testdata/get-missing-all-flags-err.ct
vendored
Normal file
|
|
@ -0,0 +1,14 @@
|
||||||
|
$ pda get foobar --> FAIL
|
||||||
|
$ pda get foobar --base64 --> FAIL
|
||||||
|
$ pda get foobar --base64 --run --> FAIL
|
||||||
|
$ pda get foobar --base64 --run --secret --> FAIL
|
||||||
|
$ pda get foobar --run --> FAIL
|
||||||
|
$ pda get foobar --run --secret --> FAIL
|
||||||
|
$ pda get foobar --secret --> FAIL
|
||||||
|
FAIL cannot get 'foobar': no such key
|
||||||
|
FAIL cannot get 'foobar': no such key
|
||||||
|
FAIL cannot get 'foobar': no such key
|
||||||
|
FAIL unknown flag: --secret
|
||||||
|
FAIL cannot get 'foobar': no such key
|
||||||
|
FAIL unknown flag: --secret
|
||||||
|
FAIL unknown flag: --secret
|
||||||
2
testdata/get-missing-err.ct
vendored
Normal file
2
testdata/get-missing-err.ct
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
$ pda get foobar --> FAIL
|
||||||
|
FAIL cannot get 'foobar': no such key
|
||||||
6
testdata/get-run.ct
vendored
Normal file
6
testdata/get-run.ct
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
||||||
|
$ fecho cmd echo hello
|
||||||
|
$ pda set a@gr < cmd
|
||||||
|
$ pda get a@gr
|
||||||
|
echo hello
|
||||||
|
$ pda get a@gr --run
|
||||||
|
hello
|
||||||
3
testdata/get.ct
vendored
Normal file
3
testdata/get.ct
vendored
Normal file
|
|
@ -0,0 +1,3 @@
|
||||||
|
$ pda set foo@g bar
|
||||||
|
$ pda get foo@g
|
||||||
|
bar
|
||||||
2
testdata/get__err__with__invalid_db.ct
vendored
2
testdata/get__err__with__invalid_db.ct
vendored
|
|
@ -1,2 +0,0 @@
|
||||||
$ pda get key@foo/bar --> FAIL
|
|
||||||
Error: cannot get 'key@foo/bar': bad db format, use DB or @DB
|
|
||||||
2
testdata/get__missing__err.ct
vendored
2
testdata/get__missing__err.ct
vendored
|
|
@ -1,2 +0,0 @@
|
||||||
$ pda get foobar --> FAIL
|
|
||||||
Error: cannot get 'foobar': Key not found
|
|
||||||
14
testdata/get__missing__err__with__any.ct
vendored
14
testdata/get__missing__err__with__any.ct
vendored
|
|
@ -1,14 +0,0 @@
|
||||||
$ pda get foobar --> FAIL
|
|
||||||
$ pda get foobar --include-binary --> FAIL
|
|
||||||
$ pda get foobar --include-binary --run --> FAIL
|
|
||||||
$ pda get foobar --include-binary --run --secret --> FAIL
|
|
||||||
$ pda get foobar --run --> FAIL
|
|
||||||
$ pda get foobar --run --secret --> FAIL
|
|
||||||
$ pda get foobar --secret --> FAIL
|
|
||||||
Error: cannot get 'foobar': Key not found
|
|
||||||
Error: cannot get 'foobar': Key not found
|
|
||||||
Error: cannot get 'foobar': Key not found
|
|
||||||
Error: cannot get 'foobar': Key not found
|
|
||||||
Error: cannot get 'foobar': Key not found
|
|
||||||
Error: cannot get 'foobar': Key not found
|
|
||||||
Error: cannot get 'foobar': Key not found
|
|
||||||
3
testdata/get__ok.ct
vendored
3
testdata/get__ok.ct
vendored
|
|
@ -1,3 +0,0 @@
|
||||||
$ pda set foo bar
|
|
||||||
$ pda get foo
|
|
||||||
bar
|
|
||||||
3
testdata/get__ok__with__binary.ct
vendored
3
testdata/get__ok__with__binary.ct
vendored
|
|
@ -1,3 +0,0 @@
|
||||||
$ pda set a b
|
|
||||||
$ pda get a --include-binary
|
|
||||||
b
|
|
||||||
4
testdata/get__ok__with__binary_run.ct
vendored
4
testdata/get__ok__with__binary_run.ct
vendored
|
|
@ -1,4 +0,0 @@
|
||||||
$ fecho cmd echo hello
|
|
||||||
$ pda set foo < cmd
|
|
||||||
$ pda get foo --include-binary --run
|
|
||||||
hello
|
|
||||||
4
testdata/get__ok__with__binary_run_secret.ct
vendored
4
testdata/get__ok__with__binary_run_secret.ct
vendored
|
|
@ -1,4 +0,0 @@
|
||||||
$ fecho cmd echo hello
|
|
||||||
$ pda set foo < cmd
|
|
||||||
$ pda get foo --include-binary --run --secret
|
|
||||||
hello
|
|
||||||
6
testdata/get__ok__with__run.ct
vendored
6
testdata/get__ok__with__run.ct
vendored
|
|
@ -1,6 +0,0 @@
|
||||||
$ fecho cmd echo hello
|
|
||||||
$ pda set a < cmd
|
|
||||||
$ pda get a
|
|
||||||
echo hello
|
|
||||||
$ pda get a --run
|
|
||||||
hello
|
|
||||||
6
testdata/get__ok__with__run_secret.ct
vendored
6
testdata/get__ok__with__run_secret.ct
vendored
|
|
@ -1,6 +0,0 @@
|
||||||
$ fecho cmd echo hello
|
|
||||||
$ pda set a < cmd
|
|
||||||
$ pda get a
|
|
||||||
echo hello
|
|
||||||
$ pda get a --run --secret
|
|
||||||
hello
|
|
||||||
3
testdata/get__ok__with__secret.ct
vendored
3
testdata/get__ok__with__secret.ct
vendored
|
|
@ -1,3 +0,0 @@
|
||||||
$ pda set foo bar
|
|
||||||
$ pda get foo --secret
|
|
||||||
bar
|
|
||||||
3
testdata/get__secret__err.ct
vendored
3
testdata/get__secret__err.ct
vendored
|
|
@ -1,3 +0,0 @@
|
||||||
$ pda set a b --secret
|
|
||||||
$ pda get a --> FAIL
|
|
||||||
Error: cannot get 'a': marked as secret, run with --secret
|
|
||||||
4
testdata/get__secret__err__with__binary.ct
vendored
4
testdata/get__secret__err__with__binary.ct
vendored
|
|
@ -1,4 +0,0 @@
|
||||||
$ fecho cmd echo hello world
|
|
||||||
$ pda set a --secret < cmd
|
|
||||||
$ pda get a --include-binary --> FAIL
|
|
||||||
Error: cannot get 'a': marked as secret, run with --secret
|
|
||||||
|
|
@ -1,4 +0,0 @@
|
||||||
$ fecho cmd echo hello world
|
|
||||||
$ pda set a --secret < cmd
|
|
||||||
$ pda get a --include-binary --run --> FAIL
|
|
||||||
Error: cannot get 'a': marked as secret, run with --secret
|
|
||||||
4
testdata/get__secret__err__with__run.ct
vendored
4
testdata/get__secret__err__with__run.ct
vendored
|
|
@ -1,4 +0,0 @@
|
||||||
$ fecho cmd echo hello world
|
|
||||||
$ pda set a --secret < cmd
|
|
||||||
$ pda get a --run --> FAIL
|
|
||||||
Error: cannot get 'a': marked as secret, run with --secret
|
|
||||||
|
|
@ -1,4 +0,0 @@
|
||||||
$ fecho cmd echo hello world
|
|
||||||
$ pda set a --secret < cmd
|
|
||||||
$ pda get a --secret --run --include-binary
|
|
||||||
hello world
|
|
||||||
|
|
@ -1,4 +0,0 @@
|
||||||
$ fecho cmd echo hello world
|
|
||||||
$ pda set a --secret < cmd
|
|
||||||
$ pda get a --include-binary --secret
|
|
||||||
echo hello world
|
|
||||||
|
|
@ -1,4 +0,0 @@
|
||||||
$ fecho cmd echo hello world
|
|
||||||
$ pda set a --secret < cmd
|
|
||||||
$ pda get a --run --secret
|
|
||||||
hello world
|
|
||||||
4
testdata/get__secret__ok__with__secret.ct
vendored
4
testdata/get__secret__ok__with__secret.ct
vendored
|
|
@ -1,4 +0,0 @@
|
||||||
$ fecho cmd echo hello world
|
|
||||||
$ pda set a --secret < cmd
|
|
||||||
$ pda get a --secret
|
|
||||||
echo hello world
|
|
||||||
22
testdata/help-export.ct
vendored
Normal file
22
testdata/help-export.ct
vendored
Normal file
|
|
@ -0,0 +1,22 @@
|
||||||
|
$ pda help export
|
||||||
|
$ pda export --help
|
||||||
|
Export store as NDJSON (alias for list --format ndjson)
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
pda export [STORE] [flags]
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
-h, --help help for export
|
||||||
|
-k, --key strings filter keys with glob pattern (repeatable)
|
||||||
|
-s, --store strings filter stores with glob pattern (repeatable)
|
||||||
|
-v, --value strings filter values with glob pattern (repeatable)
|
||||||
|
Export store as NDJSON (alias for list --format ndjson)
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
pda export [STORE] [flags]
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
-h, --help help for export
|
||||||
|
-k, --key strings filter keys with glob pattern (repeatable)
|
||||||
|
-s, --store strings filter stores with glob pattern (repeatable)
|
||||||
|
-v, --value strings filter values with glob pattern (repeatable)
|
||||||
44
testdata/help-get.ct
vendored
Normal file
44
testdata/help-get.ct
vendored
Normal file
|
|
@ -0,0 +1,44 @@
|
||||||
|
$ pda help get
|
||||||
|
$ pda get --help
|
||||||
|
Get the value of a key. Optionally specify a store.
|
||||||
|
|
||||||
|
{{ .TEMPLATES }} can be filled by passing TEMPLATE=VALUE as an
|
||||||
|
additional argument after the initial KEY being fetched.
|
||||||
|
|
||||||
|
For example:
|
||||||
|
pda set greeting 'Hello, {{ .NAME }}!'
|
||||||
|
pda get greeting NAME=World
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
pda get KEY[@STORE] [flags]
|
||||||
|
|
||||||
|
Aliases:
|
||||||
|
get, g
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
-b, --base64 view binary data as base64
|
||||||
|
--exists exit 0 if the key exists, exit 1 if not (no output)
|
||||||
|
-h, --help help for get
|
||||||
|
--no-template directly output template syntax
|
||||||
|
-c, --run execute the result as a shell command
|
||||||
|
Get the value of a key. Optionally specify a store.
|
||||||
|
|
||||||
|
{{ .TEMPLATES }} can be filled by passing TEMPLATE=VALUE as an
|
||||||
|
additional argument after the initial KEY being fetched.
|
||||||
|
|
||||||
|
For example:
|
||||||
|
pda set greeting 'Hello, {{ .NAME }}!'
|
||||||
|
pda get greeting NAME=World
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
pda get KEY[@STORE] [flags]
|
||||||
|
|
||||||
|
Aliases:
|
||||||
|
get, g
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
-b, --base64 view binary data as base64
|
||||||
|
--exists exit 0 if the key exists, exit 1 if not (no output)
|
||||||
|
-h, --help help for get
|
||||||
|
--no-template directly output template syntax
|
||||||
|
-c, --run execute the result as a shell command
|
||||||
26
testdata/help-import.ct
vendored
Normal file
26
testdata/help-import.ct
vendored
Normal file
|
|
@ -0,0 +1,26 @@
|
||||||
|
$ pda help import
|
||||||
|
$ pda import --help
|
||||||
|
Restore key/value pairs from an NDJSON dump
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
pda import [STORE] [flags]
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
--drop drop existing entries before restoring (full replace)
|
||||||
|
-f, --file string path to an NDJSON dump (defaults to stdin)
|
||||||
|
-h, --help help for import
|
||||||
|
-i, --interactive prompt before overwriting existing keys
|
||||||
|
-k, --key strings restore keys matching glob pattern (repeatable)
|
||||||
|
-s, --store strings restore entries from stores matching glob pattern (repeatable)
|
||||||
|
Restore key/value pairs from an NDJSON dump
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
pda import [STORE] [flags]
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
--drop drop existing entries before restoring (full replace)
|
||||||
|
-f, --file string path to an NDJSON dump (defaults to stdin)
|
||||||
|
-h, --help help for import
|
||||||
|
-i, --interactive prompt before overwriting existing keys
|
||||||
|
-k, --key strings restore keys matching glob pattern (repeatable)
|
||||||
|
-s, --store strings restore entries from stores matching glob pattern (repeatable)
|
||||||
26
testdata/help-list-stores.ct
vendored
Normal file
26
testdata/help-list-stores.ct
vendored
Normal file
|
|
@ -0,0 +1,26 @@
|
||||||
|
$ pda help list-stores
|
||||||
|
$ pda list-stores --help
|
||||||
|
List all stores
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
pda list-stores [flags]
|
||||||
|
|
||||||
|
Aliases:
|
||||||
|
list-stores, lss
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
-h, --help help for list-stores
|
||||||
|
--no-header suppress the header row
|
||||||
|
--short only print store names
|
||||||
|
List all stores
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
pda list-stores [flags]
|
||||||
|
|
||||||
|
Aliases:
|
||||||
|
list-stores, lss
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
-h, --help help for list-stores
|
||||||
|
--no-header suppress the header row
|
||||||
|
--short only print store names
|
||||||
68
testdata/help-list.ct
vendored
Normal file
68
testdata/help-list.ct
vendored
Normal file
|
|
@ -0,0 +1,68 @@
|
||||||
|
$ pda help list
|
||||||
|
$ pda list --help
|
||||||
|
List the contents of all stores.
|
||||||
|
|
||||||
|
By default, list shows entries from every store. Pass a store name as a
|
||||||
|
positional argument to narrow to a single store, or use --store/-s with a
|
||||||
|
glob pattern to filter by store name.
|
||||||
|
|
||||||
|
Use --key/-k and --value/-v to filter by key or value glob, and --store/-s
|
||||||
|
to filter by store name. All filters are repeatable and OR'd within the
|
||||||
|
same flag.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
pda list [STORE] [flags]
|
||||||
|
|
||||||
|
Aliases:
|
||||||
|
list, ls
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
-a, --all list across all stores
|
||||||
|
-b, --base64 view binary data as base64
|
||||||
|
-c, --count print only the count of matching entries
|
||||||
|
-o, --format format output format (table|tsv|csv|markdown|html|ndjson|json)
|
||||||
|
-f, --full show full values without truncation
|
||||||
|
-h, --help help for list
|
||||||
|
-k, --key strings filter keys with glob pattern (repeatable)
|
||||||
|
--no-header suppress the header row
|
||||||
|
--no-keys suppress the key column
|
||||||
|
--no-meta suppress the meta column
|
||||||
|
--no-size suppress the size column
|
||||||
|
--no-store suppress the store column
|
||||||
|
--no-ttl suppress the TTL column
|
||||||
|
--no-values suppress the value column
|
||||||
|
-s, --store strings filter stores with glob pattern (repeatable)
|
||||||
|
-v, --value strings filter values with glob pattern (repeatable)
|
||||||
|
List the contents of all stores.
|
||||||
|
|
||||||
|
By default, list shows entries from every store. Pass a store name as a
|
||||||
|
positional argument to narrow to a single store, or use --store/-s with a
|
||||||
|
glob pattern to filter by store name.
|
||||||
|
|
||||||
|
Use --key/-k and --value/-v to filter by key or value glob, and --store/-s
|
||||||
|
to filter by store name. All filters are repeatable and OR'd within the
|
||||||
|
same flag.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
pda list [STORE] [flags]
|
||||||
|
|
||||||
|
Aliases:
|
||||||
|
list, ls
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
-a, --all list across all stores
|
||||||
|
-b, --base64 view binary data as base64
|
||||||
|
-c, --count print only the count of matching entries
|
||||||
|
-o, --format format output format (table|tsv|csv|markdown|html|ndjson|json)
|
||||||
|
-f, --full show full values without truncation
|
||||||
|
-h, --help help for list
|
||||||
|
-k, --key strings filter keys with glob pattern (repeatable)
|
||||||
|
--no-header suppress the header row
|
||||||
|
--no-keys suppress the key column
|
||||||
|
--no-meta suppress the meta column
|
||||||
|
--no-size suppress the size column
|
||||||
|
--no-store suppress the store column
|
||||||
|
--no-ttl suppress the TTL column
|
||||||
|
--no-values suppress the value column
|
||||||
|
-s, --store strings filter stores with glob pattern (repeatable)
|
||||||
|
-v, --value strings filter values with glob pattern (repeatable)
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue