feat: makes ls list all stores by default, with config option to disable. adds --store glob support

This commit is contained in:
Lewis Wynne 2026-02-11 23:04:14 +00:00
parent b6248e409f
commit 55b2e7f6cb
35 changed files with 487 additions and 177 deletions

View file

@ -81,7 +81,7 @@ Key commands:
copy Make a copy of a key
get Get the value of a key
identity Show or create the age encryption identity
list List the contents of a store
list List the contents of all stores
move Move a key
remove Delete one or more keys
run Get the value of a key and execute it
@ -219,22 +219,28 @@ pda rm kitty -y
<p align="center"></p><!-- spacer -->
`pda ls` to see what you've got stored.
`pda ls` to see what you've got stored. By default it lists the contents of all stores. Pass a store name to list only that store. Listing a specific store is faster than listing everything, though the difference is insignificant unless you have a large number of stores. Set `store.list_all_stores` to false to list only `store.default_store_name` by default.
```bash
pda ls
# Key Value TTL
# name Alice no expiry
# dogs four legged mammals no expiry
# Key Store Value TTL
# dogs default four legged mammals no expiry
# name default Alice no expiry
# Narrow to a single store.
pda ls @default
# Or filter stores by glob pattern.
pda ls --store "prod*"
# Or as CSV.
pda ls --format csv
# Key,Value,TTL
# name,Alice,no expiry
# dogs,four legged mammals,no expiry
# Key,Store,Value,TTL
# dogs,default,four legged mammals,no expiry
# name,default,Alice,no expiry
# Or as a JSON array.
pda ls --format json
# [{"key":"name","value":"Alice","encoding":"text"},{"key":"dogs","value":"four legged mammals","encoding":"text"}]
# [{"key":"dogs","value":"four legged mammals","encoding":"text","store":"default"},{"key":"name","value":"Alice","encoding":"text","store":"default"}]
# Or TSV, Markdown, HTML, NDJSON.
@ -273,19 +279,25 @@ pda export --value "**https**"
<p align="center"></p><!-- spacer -->
`pda import` to import it all back. By default, import merges into the existing store — existing keys are updated and new keys are added.
`pda import` to import it all back. By default, each entry is routed to the store it came from (via the `"store"` field in the NDJSON). If no `"store"` field is present, entries go to the default store. Pass a store name as a positional argument to force all entries into one store. Existing keys are updated and new keys are added.
```bash
# Import with an argument.
# Entries are routed to their original stores.
pda import -f my_backup
# ok restored 2 entries into @default
# ok restored 5 entries
# Force all entries into a specific store by passing a store name.
pda import mystore -f my_backup
# ok restored 5 entries into @mystore
# Or from stdin.
pda import < my_backup
# ok restored 2 entries into @default
# Import only matching keys.
pda import --key "a*" -f my_backup
# Import only entries from matching stores.
pda import --store "prod*" -f my_backup
# Full replace — drop all existing entries before importing.
pda import --drop -f my_backup
```
@ -476,9 +488,9 @@ pda get hello --no-template
### Filtering
`--key`/`-k` and `--value`/`-v` can be used as filters with glob support. `gobwas/glob` is used for matching. Both flags are repeatable, with results matching one-or-more of the keys and one-or-more of the values passed. If a `--key` and `--value` are passed, results must match both of them. If multiple are passed, results must match at least one `--key` and `--value` pattern.
`--key`/`-k`, `--value`/`-v`, and `--store`/`-s` can be used as filters with glob support. `gobwas/glob` is used for matching. All three flags are repeatable, with results matching one-or-more of the patterns passed per flag. When multiple flags are combined, results must satisfy all of them (AND across flags, OR within the same flag).
`--key` and `--value` filters work with `list`, `remove`, `export`, and `import` commands.
`--key`, `--value`, and `--store` filters work with `list`, `export`, `import`, and `remove`. `--value` is not available on `import`.
<p align="center"></p><!-- spacer -->
@ -772,10 +784,12 @@ display_ascii_art = true
[key]
always_prompt_delete = false
always_prompt_glob_delete = true
always_prompt_overwrite = false
[store]
default_store_name = "default"
list_all_stores = true
always_prompt_delete = true
always_prompt_overwrite = true

View file

@ -40,11 +40,13 @@ type Config struct {
// KeyConfig holds confirmation-prompt settings for key-level operations,
// decoded from the [key] table of the TOML config file.
type KeyConfig struct {
	AlwaysPromptDelete     bool `toml:"always_prompt_delete"`      // prompt before every key deletion
	AlwaysPromptGlobDelete bool `toml:"always_prompt_glob_delete"` // prompt per key when deleting via --key/--value/--store filter flags
	AlwaysPromptOverwrite  bool `toml:"always_prompt_overwrite"`   // NOTE(review): presumably prompts before overwriting an existing key — confirm against set/copy callers
}
// StoreConfig holds store-level settings, decoded from the [store] table
// of the TOML config file.
type StoreConfig struct {
	DefaultStoreName      string `toml:"default_store_name"`     // store targeted when none is named (default "default")
	ListAllStores         bool   `toml:"list_all_stores"`        // when true (the default), `list` with no argument shows every store
	AlwaysPromptDelete    bool   `toml:"always_prompt_delete"`   // NOTE(review): appears to guard store deletion (default true) — confirm against store-delete callers
	AlwaysPromptOverwrite bool   `toml:"always_prompt_overwrite"` // NOTE(review): appears to guard store overwrite (default true) — confirm
}
@ -78,10 +80,12 @@ func defaultConfig() Config {
DisplayAsciiArt: true,
Key: KeyConfig{
AlwaysPromptDelete: false,
AlwaysPromptGlobDelete: true,
AlwaysPromptOverwrite: false,
},
Store: StoreConfig{
DefaultStoreName: "default",
ListAllStores: true,
AlwaysPromptDelete: true,
AlwaysPromptOverwrite: true,
},

View file

@ -55,12 +55,21 @@ func del(cmd *cobra.Command, args []string) error {
if err != nil {
return err
}
valuePatterns, err := cmd.Flags().GetStringSlice("value")
if err != nil {
return err
}
storePatterns, err := cmd.Flags().GetStringSlice("store")
if err != nil {
return err
}
if len(args) == 0 && len(keyPatterns) == 0 {
hasFilters := len(keyPatterns) > 0 || len(valuePatterns) > 0 || len(storePatterns) > 0
if len(args) == 0 && !hasFilters {
return fmt.Errorf("cannot remove: no keys provided")
}
targets, err := resolveDeleteTargets(store, args, keyPatterns)
targets, err := resolveDeleteTargets(store, args, keyPatterns, valuePatterns, storePatterns)
if err != nil {
return err
}
@ -75,8 +84,9 @@ func del(cmd *cobra.Command, args []string) error {
}
byStore := make(map[string]*storeTargets)
var storeOrder []string
promptGlob := hasFilters && config.Key.AlwaysPromptGlobDelete
for _, target := range targets {
if !yes && (interactive || config.Key.AlwaysPromptDelete) {
if !yes && (interactive || config.Key.AlwaysPromptDelete || promptGlob) {
var confirm string
promptf("remove '%s'? (y/n)", target.display)
if err := scanln(&confirm); err != nil {
@ -126,6 +136,8 @@ func init() {
delCmd.Flags().BoolP("interactive", "i", false, "prompt yes/no for each deletion")
delCmd.Flags().BoolP("yes", "y", false, "skip all confirmation prompts")
delCmd.Flags().StringSliceP("key", "k", nil, "delete keys matching glob pattern (repeatable)")
delCmd.Flags().StringSliceP("store", "s", nil, "target stores matching glob pattern (repeatable)")
delCmd.Flags().StringSliceP("value", "v", nil, "delete entries matching value glob pattern (repeatable)")
rootCmd.AddCommand(delCmd)
}
@ -152,7 +164,7 @@ func keyExists(store *Store, arg string) (bool, error) {
return findEntry(entries, spec.Key) >= 0, nil
}
func resolveDeleteTargets(store *Store, exactArgs []string, globPatterns []string) ([]resolvedTarget, error) {
func resolveDeleteTargets(store *Store, exactArgs []string, globPatterns []string, valuePatterns []string, storePatterns []string) ([]resolvedTarget, error) {
targetSet := make(map[string]struct{})
var targets []resolvedTarget
@ -185,16 +197,32 @@ func resolveDeleteTargets(store *Store, exactArgs []string, globPatterns []strin
addTarget(spec)
}
if len(globPatterns) == 0 {
if len(globPatterns) == 0 && len(valuePatterns) == 0 && len(storePatterns) == 0 {
return targets, nil
}
// Resolve --store patterns into a list of target stores.
storeMatchers, err := compileGlobMatchers(storePatterns)
if err != nil {
return nil, fmt.Errorf("cannot remove: %v", err)
}
valueMatchers, err := compileValueMatchers(valuePatterns)
if err != nil {
return nil, fmt.Errorf("cannot remove: %v", err)
}
type compiledPattern struct {
rawArg string
db string
matcher glob.Glob
}
// When --store or --value is given without --key, match all keys.
if len(globPatterns) == 0 {
globPatterns = []string{"**"}
}
var compiled []compiledPattern
for _, raw := range globPatterns {
spec, err := store.parseKey(raw, true)
@ -206,37 +234,50 @@ func resolveDeleteTargets(store *Store, exactArgs []string, globPatterns []strin
if err != nil {
return nil, fmt.Errorf("cannot remove '%s': %v", raw, err)
}
compiled = append(compiled, compiledPattern{
rawArg: raw,
db: spec.DB,
matcher: m,
})
if len(storeMatchers) > 0 && !strings.Contains(raw, "@") {
// --store given and pattern has no explicit @STORE: expand across matching stores.
allStores, err := store.AllStores()
if err != nil {
return nil, fmt.Errorf("cannot remove: %v", err)
}
for _, s := range allStores {
if globMatch(storeMatchers, s) {
compiled = append(compiled, compiledPattern{rawArg: raw, db: s, matcher: m})
}
}
} else {
compiled = append(compiled, compiledPattern{rawArg: raw, db: spec.DB, matcher: m})
}
}
keysByDB := make(map[string][]string)
getKeys := func(db string) ([]string, error) {
if keys, ok := keysByDB[db]; ok {
return keys, nil
entriesByDB := make(map[string][]Entry)
getEntries := func(db string) ([]Entry, error) {
if entries, ok := entriesByDB[db]; ok {
return entries, nil
}
keys, err := store.Keys(db)
p, err := store.storePath(db)
if err != nil {
return nil, err
}
keysByDB[db] = keys
return keys, nil
entries, err := readStoreFile(p, nil)
if err != nil {
return nil, err
}
entriesByDB[db] = entries
return entries, nil
}
for _, p := range compiled {
keys, err := getKeys(p.db)
entries, err := getEntries(p.db)
if err != nil {
return nil, fmt.Errorf("cannot remove '%s': %v", p.rawArg, err)
}
for _, k := range keys {
if p.matcher.Match(k) {
for _, e := range entries {
if p.matcher.Match(e.Key) && valueMatch(valueMatchers, e) {
addTarget(KeySpec{
Raw: k,
RawKey: k,
Key: k,
Raw: e.Key,
RawKey: e.Key,
Key: e.Key,
DB: p.db,
})
}

View file

@ -302,12 +302,18 @@ func configDiffs() []string {
if config.Key.AlwaysPromptDelete != def.Key.AlwaysPromptDelete {
diffs = append(diffs, fmt.Sprintf("key.always_prompt_delete: %v", config.Key.AlwaysPromptDelete))
}
if config.Key.AlwaysPromptGlobDelete != def.Key.AlwaysPromptGlobDelete {
diffs = append(diffs, fmt.Sprintf("key.always_prompt_glob_delete: %v", config.Key.AlwaysPromptGlobDelete))
}
if config.Key.AlwaysPromptOverwrite != def.Key.AlwaysPromptOverwrite {
diffs = append(diffs, fmt.Sprintf("key.always_prompt_overwrite: %v", config.Key.AlwaysPromptOverwrite))
}
if config.Store.DefaultStoreName != def.Store.DefaultStoreName {
diffs = append(diffs, fmt.Sprintf("store.default_store_name: %s", config.Store.DefaultStoreName))
}
if config.Store.ListAllStores != def.Store.ListAllStores {
diffs = append(diffs, fmt.Sprintf("store.list_all_stores: %v", config.Store.ListAllStores))
}
if config.Store.AlwaysPromptDelete != def.Store.AlwaysPromptDelete {
diffs = append(diffs, fmt.Sprintf("store.always_prompt_delete: %v", config.Store.AlwaysPromptDelete))
}

View file

@ -40,6 +40,7 @@ var exportCmd = &cobra.Command{
// init wires the export command into the root command and registers its
// repeatable glob-filter flags (key, store, value).
func init() {
	flags := exportCmd.Flags()
	flags.StringSliceP("key", "k", nil, "filter keys with glob pattern (repeatable)")
	flags.StringSliceP("store", "s", nil, "filter stores with glob pattern (repeatable)")
	flags.StringSliceP("value", "v", nil, "filter values with glob pattern (repeatable)")
	rootCmd.AddCommand(exportCmd)
}

View file

@ -28,6 +28,7 @@ import (
"fmt"
"io"
"os"
"slices"
"strconv"
"strings"
"unicode/utf8"
@ -63,6 +64,7 @@ var (
listNoValues bool
listNoTTL bool
listFull bool
listAll bool
listNoHeader bool
listFormat formatEnum = "table"
@ -75,11 +77,22 @@ const (
columnKey columnKind = iota
columnValue
columnTTL
columnStore
)
var listCmd = &cobra.Command{
Use: "list [STORE]",
Short: "List the contents of a store",
Short: "List the contents of all stores",
Long: `List the contents of all stores.
By default, list shows entries from every store. Pass a store name as a
positional argument to narrow to a single store, or use --store/-s with a
glob pattern to filter by store name.
The Store column is always shown so entries can be distinguished across
stores. Use --key/-k and --value/-v to filter by key or value glob, and
--store/-s to filter by store name. All filters are repeatable and OR'd
within the same flag.`,
Aliases: []string{"ls"},
Args: cobra.MaximumNArgs(1),
RunE: list,
@ -88,8 +101,22 @@ var listCmd = &cobra.Command{
func list(cmd *cobra.Command, args []string) error {
store := &Store{}
targetDB := "@" + config.Store.DefaultStoreName
if len(args) == 1 {
storePatterns, err := cmd.Flags().GetStringSlice("store")
if err != nil {
return fmt.Errorf("cannot ls: %v", err)
}
if len(storePatterns) > 0 && len(args) > 0 {
return fmt.Errorf("cannot use --store with a store argument")
}
allStores := len(args) == 0 && (config.Store.ListAllStores || listAll)
var targetDB string
if allStores {
targetDB = "all"
} else if len(args) == 0 {
targetDB = "@" + config.Store.DefaultStoreName
} else {
rawArg := args[0]
dbName, err := store.parseDB(rawArg, false)
if err != nil {
@ -113,6 +140,7 @@ func list(cmd *cobra.Command, args []string) error {
if !listNoKeys {
columns = append(columns, columnKey)
}
columns = append(columns, columnStore)
if !listNoValues {
columns = append(columns, columnValue)
}
@ -138,26 +166,62 @@ func list(cmd *cobra.Command, args []string) error {
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
}
storeMatchers, err := compileGlobMatchers(storePatterns)
if err != nil {
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
}
identity, _ := loadIdentity()
var recipient *age.X25519Recipient
if identity != nil {
recipient = identity.Recipient()
}
var entries []Entry
if allStores {
storeNames, err := store.AllStores()
if err != nil {
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
}
for _, name := range storeNames {
p, err := store.storePath(name)
if err != nil {
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
}
storeEntries, err := readStoreFile(p, identity)
if err != nil {
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
}
for i := range storeEntries {
storeEntries[i].StoreName = name
}
entries = append(entries, storeEntries...)
}
slices.SortFunc(entries, func(a, b Entry) int {
if c := strings.Compare(a.Key, b.Key); c != 0 {
return c
}
return strings.Compare(a.StoreName, b.StoreName)
})
} else {
dbName := targetDB[1:] // strip leading '@'
p, err := store.storePath(dbName)
if err != nil {
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
}
entries, err := readStoreFile(p, identity)
entries, err = readStoreFile(p, identity)
if err != nil {
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
}
for i := range entries {
entries[i].StoreName = dbName
}
}
// Filter by key glob and value regex
// Filter by key glob, value regex, and store glob
var filtered []Entry
for _, e := range entries {
if globMatch(matchers, e.Key) && valueMatch(valueMatchers, e) {
if globMatch(matchers, e.Key) && valueMatch(valueMatchers, e) && globMatch(storeMatchers, e.StoreName) {
filtered = append(filtered, e)
}
}
@ -167,15 +231,19 @@ func list(cmd *cobra.Command, args []string) error {
return nil
}
if (len(matchers) > 0 || len(valueMatchers) > 0) && len(filtered) == 0 {
switch {
case len(matchers) > 0 && len(valueMatchers) > 0:
return fmt.Errorf("cannot ls '%s': no matches for key pattern %s and value pattern %s", targetDB, formatGlobPatterns(keyPatterns), formatValuePatterns(valuePatterns))
case len(valueMatchers) > 0:
return fmt.Errorf("cannot ls '%s': no matches for value pattern %s", targetDB, formatValuePatterns(valuePatterns))
default:
return fmt.Errorf("cannot ls '%s': no matches for key pattern %s", targetDB, formatGlobPatterns(keyPatterns))
hasFilters := len(matchers) > 0 || len(valueMatchers) > 0 || len(storeMatchers) > 0
if hasFilters && len(filtered) == 0 {
var parts []string
if len(matchers) > 0 {
parts = append(parts, fmt.Sprintf("key pattern %s", formatGlobPatterns(keyPatterns)))
}
if len(valueMatchers) > 0 {
parts = append(parts, fmt.Sprintf("value pattern %s", formatValuePatterns(valuePatterns)))
}
if len(storeMatchers) > 0 {
parts = append(parts, fmt.Sprintf("store pattern %s", formatGlobPatterns(storePatterns)))
}
return fmt.Errorf("cannot ls '%s': no matches for %s", targetDB, strings.Join(parts, " and "))
}
output := cmd.OutOrStdout()
@ -187,6 +255,7 @@ func list(cmd *cobra.Command, args []string) error {
if err != nil {
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
}
je.Store = e.StoreName
data, err := json.Marshal(je)
if err != nil {
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
@ -198,15 +267,16 @@ func list(cmd *cobra.Command, args []string) error {
// JSON format: emit a single JSON array
if listFormat.String() == "json" {
var entries []jsonEntry
var jsonEntries []jsonEntry
for _, e := range filtered {
je, err := encodeJsonEntry(e, recipient)
if err != nil {
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
}
entries = append(entries, je)
je.Store = e.StoreName
jsonEntries = append(jsonEntries, je)
}
data, err := json.Marshal(entries)
data, err := json.Marshal(jsonEntries)
if err != nil {
return fmt.Errorf("cannot ls '%s': %v", targetDB, err)
}
@ -267,6 +337,12 @@ func list(cmd *cobra.Command, args []string) error {
} else {
row = append(row, valueStr)
}
case columnStore:
if tty {
row = append(row, dimStyle.Sprint(e.StoreName))
} else {
row = append(row, e.StoreName)
}
case columnTTL:
ttlStr := formatExpiry(e.ExpiresAt)
if tty && e.ExpiresAt == 0 {
@ -359,6 +435,8 @@ func headerRow(columns []columnKind, tty bool) table.Row {
switch col {
case columnKey:
row = append(row, h("Key"))
case columnStore:
row = append(row, h("Store"))
case columnValue:
row = append(row, h("Value"))
case columnTTL:
@ -370,12 +448,13 @@ func headerRow(columns []columnKind, tty bool) table.Row {
const (
keyColumnWidthCap = 30
storeColumnWidthCap = 20
ttlColumnWidthCap = 20
)
// columnLayout holds the resolved max widths for each column kind.
type columnLayout struct {
key, value, ttl int
key, store, value, ttl int
}
// computeLayout derives column widths from the terminal size and actual
@ -385,11 +464,14 @@ func computeLayout(columns []columnKind, out io.Writer, entries []Entry) columnL
var lay columnLayout
termWidth := detectTerminalWidth(out)
// Scan entries for actual max key/TTL content widths.
// Scan entries for actual max key/store/TTL content widths.
for _, e := range entries {
if w := utf8.RuneCountInString(e.Key); w > lay.key {
lay.key = w
}
if w := utf8.RuneCountInString(e.StoreName); w > lay.store {
lay.store = w
}
if w := utf8.RuneCountInString(formatExpiry(e.ExpiresAt)); w > lay.ttl {
lay.ttl = w
}
@ -397,6 +479,9 @@ func computeLayout(columns []columnKind, out io.Writer, entries []Entry) columnL
if lay.key > keyColumnWidthCap {
lay.key = keyColumnWidthCap
}
if lay.store > storeColumnWidthCap {
lay.store = storeColumnWidthCap
}
if lay.ttl > ttlColumnWidthCap {
lay.ttl = ttlColumnWidthCap
}
@ -417,6 +502,8 @@ func computeLayout(columns []columnKind, out io.Writer, entries []Entry) columnL
switch col {
case columnKey:
lay.value -= lay.key
case columnStore:
lay.value -= lay.store
case columnTTL:
lay.value -= lay.ttl
}
@ -442,6 +529,9 @@ func applyColumnWidths(tw table.Writer, columns []columnKind, out io.Writer, lay
case columnKey:
maxW = lay.key
enforcer = text.Trim
case columnStore:
maxW = lay.store
enforcer = text.Trim
case columnValue:
maxW = lay.value
if full {
@ -496,6 +586,7 @@ func renderTable(tw table.Writer) {
}
func init() {
listCmd.Flags().BoolVarP(&listAll, "all", "a", false, "list across all stores")
listCmd.Flags().BoolVarP(&listBase64, "base64", "b", false, "view binary data as base64")
listCmd.Flags().BoolVarP(&listCount, "count", "c", false, "print only the count of matching entries")
listCmd.Flags().BoolVar(&listNoKeys, "no-keys", false, "suppress the key column")
@ -505,6 +596,7 @@ func init() {
listCmd.Flags().BoolVar(&listNoHeader, "no-header", false, "suppress the header row")
listCmd.Flags().VarP(&listFormat, "format", "o", "output format (table|tsv|csv|markdown|html|ndjson|json)")
listCmd.Flags().StringSliceP("key", "k", nil, "filter keys with glob pattern (repeatable)")
listCmd.Flags().StringSliceP("store", "s", nil, "filter stores with glob pattern (repeatable)")
listCmd.Flags().StringSliceP("value", "v", nil, "filter values with glob pattern (repeatable)")
rootCmd.AddCommand(listCmd)
}

View file

@ -43,6 +43,7 @@ type Entry struct {
ExpiresAt uint64 // Unix timestamp; 0 = never expires
Secret bool // encrypted on disk
Locked bool // secret but no identity available to decrypt
StoreName string // populated by list --all
}
// jsonEntry is the NDJSON on-disk format.
@ -51,6 +52,7 @@ type jsonEntry struct {
Value string `json:"value"`
Encoding string `json:"encoding,omitempty"`
ExpiresAt *int64 `json:"expires_at,omitempty"`
Store string `json:"store,omitempty"`
}
// readStoreFile reads all non-expired entries from an NDJSON file.

View file

@ -46,15 +46,16 @@ var restoreCmd = &cobra.Command{
func restore(cmd *cobra.Command, args []string) error {
store := &Store{}
dbName := config.Store.DefaultStoreName
if len(args) == 1 {
explicitStore := len(args) == 1
targetDB := config.Store.DefaultStoreName
if explicitStore {
parsed, err := store.parseDB(args[0], false)
if err != nil {
return fmt.Errorf("cannot restore '%s': %v", args[0], err)
}
dbName = parsed
targetDB = parsed
}
displayTarget := "@" + dbName
displayTarget := "@" + targetDB
keyPatterns, err := cmd.Flags().GetStringSlice("key")
if err != nil {
@ -65,6 +66,15 @@ func restore(cmd *cobra.Command, args []string) error {
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
}
storePatterns, err := cmd.Flags().GetStringSlice("store")
if err != nil {
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
}
storeMatchers, err := compileGlobMatchers(storePatterns)
if err != nil {
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
}
reader, closer, err := restoreInput(cmd)
if err != nil {
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
@ -73,11 +83,6 @@ func restore(cmd *cobra.Command, args []string) error {
defer closer.Close()
}
p, err := store.storePath(dbName)
if err != nil {
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
}
decoder := json.NewDecoder(bufio.NewReaderSize(reader, 8*1024*1024))
interactive, err := cmd.Flags().GetBool("interactive")
@ -101,7 +106,6 @@ func restore(cmd *cobra.Command, args []string) error {
if promptOverwrite {
filePath, _ := cmd.Flags().GetString("file")
if strings.TrimSpace(filePath) == "" {
// Data comes from stdin — open /dev/tty for interactive prompts.
tty, err := os.Open("/dev/tty")
if err != nil {
return fmt.Errorf("cannot restore '%s': --interactive requires --file (-f) when reading from stdin on this platform", displayTarget)
@ -111,26 +115,60 @@ func restore(cmd *cobra.Command, args []string) error {
}
}
restored, err := restoreEntries(decoder, p, restoreOpts{
opts := restoreOpts{
matchers: matchers,
storeMatchers: storeMatchers,
promptOverwrite: promptOverwrite,
drop: drop,
identity: identity,
recipient: recipient,
promptReader: promptReader,
})
}
// When a specific store is given, all entries go there (original behaviour).
// Otherwise, route entries to their original store via the "store" field.
if explicitStore {
p, err := store.storePath(targetDB)
if err != nil {
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
}
if len(matchers) > 0 && restored == 0 {
return fmt.Errorf("cannot restore '%s': no matches for key pattern %s", displayTarget, formatGlobPatterns(keyPatterns))
restored, err := restoreEntries(decoder, map[string]string{targetDB: p}, targetDB, opts)
if err != nil {
return fmt.Errorf("cannot restore '%s': %v", displayTarget, err)
}
if err := reportRestoreFilters(displayTarget, restored, matchers, keyPatterns, storeMatchers, storePatterns); err != nil {
return err
}
okf("restored %d entries into @%s", restored, targetDB)
} else {
restored, err := restoreEntries(decoder, nil, targetDB, opts)
if err != nil {
return fmt.Errorf("cannot restore: %v", err)
}
if err := reportRestoreFilters(displayTarget, restored, matchers, keyPatterns, storeMatchers, storePatterns); err != nil {
return err
}
okf("restored %d entries", restored)
}
okf("restored %d entries into @%s", restored, dbName)
return autoSync()
}
// reportRestoreFilters returns a "no matches" error when key and/or store
// filters were supplied but no entries were restored. With no filters, or
// with at least one restored entry, it reports success (nil).
func reportRestoreFilters(displayTarget string, restored int, matchers []glob.Glob, keyPatterns []string, storeMatchers []glob.Glob, storePatterns []string) error {
	// Happy path: something was restored, or there was nothing to match against.
	if restored != 0 || (len(matchers) == 0 && len(storeMatchers) == 0) {
		return nil
	}
	var descriptions []string
	if len(matchers) > 0 {
		descriptions = append(descriptions, "key pattern "+formatGlobPatterns(keyPatterns))
	}
	if len(storeMatchers) > 0 {
		descriptions = append(descriptions, "store pattern "+formatGlobPatterns(storePatterns))
	}
	return fmt.Errorf("cannot restore '%s': no matches for %s", displayTarget, strings.Join(descriptions, " and "))
}
func restoreInput(cmd *cobra.Command) (io.Reader, io.Closer, error) {
filePath, err := cmd.Flags().GetString("file")
if err != nil {
@ -148,6 +186,7 @@ func restoreInput(cmd *cobra.Command) (io.Reader, io.Closer, error) {
type restoreOpts struct {
matchers []glob.Glob
storeMatchers []glob.Glob
promptOverwrite bool
drop bool
identity *age.X25519Identity
@ -155,14 +194,49 @@ type restoreOpts struct {
promptReader io.Reader
}
func restoreEntries(decoder *json.Decoder, storePath string, opts restoreOpts) (int, error) {
var existing []Entry
if !opts.drop {
var err error
existing, err = readStoreFile(storePath, opts.identity)
if err != nil {
return 0, err
// restoreEntries decodes NDJSON entries and writes them to store files.
// storePaths maps store names to file paths. If nil, entries are routed to
// their original store (from the "store" field), falling back to defaultDB.
func restoreEntries(decoder *json.Decoder, storePaths map[string]string, defaultDB string, opts restoreOpts) (int, error) {
s := &Store{}
// Per-store accumulator.
type storeAcc struct {
path string
entries []Entry
loaded bool
}
stores := make(map[string]*storeAcc)
getStore := func(dbName string) (*storeAcc, error) {
if acc, ok := stores[dbName]; ok {
return acc, nil
}
var p string
if storePaths != nil {
var ok bool
p, ok = storePaths[dbName]
if !ok {
return nil, fmt.Errorf("unexpected store '%s'", dbName)
}
} else {
var err error
p, err = s.storePath(dbName)
if err != nil {
return nil, err
}
}
acc := &storeAcc{path: p}
if !opts.drop {
existing, err := readStoreFile(p, opts.identity)
if err != nil {
return nil, err
}
acc.entries = existing
}
acc.loaded = true
stores[dbName] = acc
return acc, nil
}
entryNo := 0
@ -183,13 +257,27 @@ func restoreEntries(decoder *json.Decoder, storePath string, opts restoreOpts) (
if !globMatch(opts.matchers, je.Key) {
continue
}
if !globMatch(opts.storeMatchers, je.Store) {
continue
}
// Determine target store.
targetDB := defaultDB
if storePaths == nil && je.Store != "" {
targetDB = je.Store
}
entry, err := decodeJsonEntry(je, opts.identity)
if err != nil {
return 0, fmt.Errorf("entry %d: %w", entryNo, err)
}
idx := findEntry(existing, entry.Key)
acc, err := getStore(targetDB)
if err != nil {
return 0, fmt.Errorf("entry %d: %v", entryNo, err)
}
idx := findEntry(acc.entries, entry.Key)
if opts.promptOverwrite && idx >= 0 {
promptf("overwrite '%s'? (y/n)", entry.Key)
@ -210,24 +298,27 @@ func restoreEntries(decoder *json.Decoder, storePath string, opts restoreOpts) (
}
if idx >= 0 {
existing[idx] = entry
acc.entries[idx] = entry
} else {
existing = append(existing, entry)
acc.entries = append(acc.entries, entry)
}
restored++
}
for _, acc := range stores {
if restored > 0 || opts.drop {
if err := writeStoreFile(storePath, existing, opts.recipient); err != nil {
if err := writeStoreFile(acc.path, acc.entries, opts.recipient); err != nil {
return 0, err
}
}
}
return restored, nil
}
func init() {
restoreCmd.Flags().StringP("file", "f", "", "path to an NDJSON dump (defaults to stdin)")
restoreCmd.Flags().StringSliceP("key", "k", nil, "restore keys matching glob pattern (repeatable)")
restoreCmd.Flags().StringSliceP("store", "s", nil, "restore entries from stores matching glob pattern (repeatable)")
restoreCmd.Flags().BoolP("interactive", "i", false, "prompt before overwriting existing keys")
restoreCmd.Flags().Bool("drop", false, "drop existing entries before restoring (full replace)")
rootCmd.AddCommand(restoreCmd)

View file

@ -2,7 +2,7 @@ $ pda set a1@ekf 1
$ pda set a2@ekf 2
$ pda set b1@ekf 3
$ pda export ekf --key "a*"
{"key":"a1","value":"1","encoding":"text"}
{"key":"a2","value":"2","encoding":"text"}
{"key":"a1","value":"1","encoding":"text","store":"ekf"}
{"key":"a2","value":"2","encoding":"text","store":"ekf"}
$ pda export ekf --key "c*" --> FAIL
FAIL cannot ls '@ekf': no matches for key pattern 'c*'

View file

@ -3,6 +3,6 @@ $ fecho tmpval hello world
$ pda set greeting@evf < tmpval
$ pda set number@evf 42
$ pda export evf --value "**https**"
{"key":"url","value":"https://example.com","encoding":"text"}
{"key":"url","value":"https://example.com","encoding":"text","store":"evf"}
$ pda export evf --value "**world**"
{"key":"greeting","value":"hello world\n","encoding":"text"}
{"key":"greeting","value":"hello world\n","encoding":"text","store":"evf"}

4
testdata/export.ct vendored
View file

@ -2,5 +2,5 @@
$ pda set a@exp 1
$ pda set b@exp 2
$ pda export exp
{"key":"a","value":"1","encoding":"text"}
{"key":"b","value":"2","encoding":"text"}
{"key":"a","value":"1","encoding":"text","store":"exp"}
{"key":"b","value":"2","encoding":"text","store":"exp"}

View file

@ -8,6 +8,7 @@ Usage:
Flags:
-h, --help help for export
-k, --key strings filter keys with glob pattern (repeatable)
-s, --store strings filter stores with glob pattern (repeatable)
-v, --value strings filter values with glob pattern (repeatable)
Export store as NDJSON (alias for list --format ndjson)
@ -17,4 +18,5 @@ Usage:
Flags:
-h, --help help for export
-k, --key strings filter keys with glob pattern (repeatable)
-s, --store strings filter stores with glob pattern (repeatable)
-v, --value strings filter values with glob pattern (repeatable)

View file

@ -11,6 +11,7 @@ Flags:
-h, --help help for import
-i, --interactive prompt before overwriting existing keys
-k, --key strings restore keys matching glob pattern (repeatable)
-s, --store strings restore entries from stores matching glob pattern (repeatable)
Restore key/value pairs from an NDJSON dump
Usage:
@ -22,3 +23,4 @@ Flags:
-h, --help help for import
-i, --interactive prompt before overwriting existing keys
-k, --key strings restore keys matching glob pattern (repeatable)
-s, --store strings restore entries from stores matching glob pattern (repeatable)

26
testdata/help-list.ct vendored
View file

@ -1,6 +1,15 @@
$ pda help list
$ pda list --help
List the contents of a store
List the contents of all stores.
By default, list shows entries from every store. Pass a store name as a
positional argument to narrow to a single store, or use --store/-s with a
glob pattern to filter by store name.
The Store column is always shown so entries can be distinguished across
stores. Use --key/-k and --value/-v to filter by key or value glob, and
--store/-s to filter by store name. All filters are repeatable and OR'd
within the same flag.
Usage:
pda list [STORE] [flags]
@ -9,6 +18,7 @@ Aliases:
list, ls
Flags:
-a, --all list across all stores
-b, --base64 view binary data as base64
-c, --count print only the count of matching entries
-o, --format format output format (table|tsv|csv|markdown|html|ndjson|json) (default table)
@ -19,8 +29,18 @@ Flags:
--no-keys suppress the key column
--no-ttl suppress the TTL column
--no-values suppress the value column
-s, --store strings filter stores with glob pattern (repeatable)
-v, --value strings filter values with glob pattern (repeatable)
List the contents of a store
List the contents of all stores.
By default, list shows entries from every store. Pass a store name as a
positional argument to narrow to a single store, or use --store/-s with a
glob pattern to filter by store name.
The Store column is always shown so entries can be distinguished across
stores. Use --key/-k and --value/-v to filter by key or value glob, and
--store/-s to filter by store name. All filters are repeatable and OR'd
within the same flag.
Usage:
pda list [STORE] [flags]
@ -29,6 +49,7 @@ Aliases:
list, ls
Flags:
-a, --all list across all stores
-b, --base64 view binary data as base64
-c, --count print only the count of matching entries
-o, --format format output format (table|tsv|csv|markdown|html|ndjson|json) (default table)
@ -39,4 +60,5 @@ Flags:
--no-keys suppress the key column
--no-ttl suppress the TTL column
--no-values suppress the value column
-s, --store strings filter stores with glob pattern (repeatable)
-v, --value strings filter values with glob pattern (repeatable)

View file

@ -12,6 +12,8 @@ Flags:
-h, --help help for remove
-i, --interactive prompt yes/no for each deletion
-k, --key strings delete keys matching glob pattern (repeatable)
-s, --store strings target stores matching glob pattern (repeatable)
-v, --value strings delete entries matching value glob pattern (repeatable)
-y, --yes skip all confirmation prompts
Delete one or more keys
@ -25,4 +27,6 @@ Flags:
-h, --help help for remove
-i, --interactive prompt yes/no for each deletion
-k, --key strings delete keys matching glob pattern (repeatable)
-s, --store strings target stores matching glob pattern (repeatable)
-v, --value strings delete entries matching value glob pattern (repeatable)
-y, --yes skip all confirmation prompts

4
testdata/help.ct vendored
View file

@ -16,7 +16,7 @@ Key commands:
copy Make a copy of a key
get Get the value of a key
identity Show or create the age encryption identity
list List the contents of a store
list List the contents of all stores
move Move a key
remove Delete one or more keys
run Get the value of a key and execute it
@ -60,7 +60,7 @@ Key commands:
copy Make a copy of a key
get Get the value of a key
identity Show or create the age encryption identity
list List the contents of a store
list List the contents of all stores
move Move a key
remove Delete one or more keys
run Get the value of a key and execute it

29
testdata/list-all.ct vendored Normal file
View file

@ -0,0 +1,29 @@
# List defaults to all stores
$ pda set lax@laa 1
$ pda set lax@lab 2
$ pda ls --key "lax" --format tsv
Key Store Value TTL
lax laa 1 no expiry
lax lab 2 no expiry
$ pda ls --key "lax" --count
2
$ pda ls --key "lax" --format json
[{"key":"lax","value":"1","encoding":"text","store":"laa"},{"key":"lax","value":"2","encoding":"text","store":"lab"}]
# Positional arg narrows to one store
$ pda ls laa --key "lax" --format tsv
Key Store Value TTL
lax laa 1 no expiry
# --store glob filter
$ pda ls --store "la?" --key "lax" --format tsv
Key Store Value TTL
lax laa 1 no expiry
lax lab 2 no expiry
$ pda ls --store "laa" --key "lax" --format tsv
Key Store Value TTL
lax laa 1 no expiry
# --store cannot be combined with positional arg
$ pda ls --store "laa" laa --> FAIL
FAIL cannot use --store with a store argument
# --store no matches
$ pda ls --store "nonexistent" --key "lax" --> FAIL
FAIL cannot ls 'all': no matches for key pattern 'lax' and store pattern 'nonexistent'

View file

@ -2,6 +2,6 @@
$ pda set a@csv 1
$ pda set b@csv 2
$ pda ls csv --format csv
Key,Value,TTL
a,1,no expiry
b,2,no expiry
Key,Store,Value,TTL
a,csv,1,no expiry
b,csv,2,no expiry

View file

@ -2,4 +2,4 @@
$ pda set a@jf 1
$ pda set b@jf 2
$ pda ls jf --format json
[{"key":"a","value":"1","encoding":"text"},{"key":"b","value":"2","encoding":"text"}]
[{"key":"a","value":"1","encoding":"text","store":"jf"},{"key":"b","value":"2","encoding":"text","store":"jf"}]

View file

@ -2,7 +2,7 @@
$ pda set a@md 1
$ pda set b@md 2
$ pda ls md --format markdown
| Key | Value | TTL |
| --- | --- | --- |
| a | 1 | no expiry |
| b | 2 | no expiry |
| Key | Store | Value | TTL |
| --- | --- | --- | --- |
| a | md | 1 | no expiry |
| b | md | 2 | no expiry |

View file

@ -2,5 +2,5 @@
$ pda set a@nj 1
$ pda set b@nj 2
$ pda ls nj --format ndjson
{"key":"a","value":"1","encoding":"text"}
{"key":"b","value":"2","encoding":"text"}
{"key":"a","value":"1","encoding":"text","store":"nj"}
{"key":"b","value":"2","encoding":"text","store":"nj"}

View file

@ -2,11 +2,11 @@ $ pda set a1@lg 1
$ pda set a2@lg 2
$ pda set b1@lg 3
$ pda ls lg --key "a*" --format tsv
Key Value TTL
a1 1 no expiry
a2 2 no expiry
Key Store Value TTL
a1 lg 1 no expiry
a2 lg 2 no expiry
$ pda ls lg --key "b*" --format tsv
Key Value TTL
b1 3 no expiry
Key Store Value TTL
b1 lg 3 no expiry
$ pda ls lg --key "c*" --> FAIL
FAIL cannot ls '@lg': no matches for key pattern 'c*'

View file

@ -2,10 +2,10 @@ $ pda set dburl@kv postgres://localhost:5432
$ pda set apiurl@kv https://api.example.com
$ pda set dbpass@kv s3cret
$ pda ls kv -k "db*" -v "**localhost**" --format tsv
Key Value TTL
dburl postgres://localhost:5432 no expiry
Key Store Value TTL
dburl kv postgres://localhost:5432 no expiry
$ pda ls kv -k "*url*" -v "**example**" --format tsv
Key Value TTL
apiurl https://api.example.com no expiry
Key Store Value TTL
apiurl kv https://api.example.com no expiry
$ pda ls kv -k "db*" -v "**nomatch**" --> FAIL
FAIL cannot ls '@kv': no matches for key pattern 'db*' and value pattern '**nomatch**'

View file

@ -1,4 +1,4 @@
# --no-header suppresses the header row
$ pda set a@nh 1
$ pda ls nh --format tsv --no-header
a 1 no expiry
a nh 1 no expiry

View file

@ -1,5 +1,5 @@
# --no-keys suppresses the key column
$ pda set a@nk 1
$ pda ls nk --format tsv --no-keys
Value TTL
1 no expiry
Store Value TTL
nk 1 no expiry

View file

@ -1,5 +1,5 @@
# --no-ttl suppresses the TTL column
$ pda set a@nt 1
$ pda ls nt --format tsv --no-ttl
Key Value
a 1
Key Store Value
a nt 1

View file

@ -1,5 +1,5 @@
# --no-values suppresses the value column
$ pda set a@nv 1
$ pda ls nv --format tsv --no-values
Key TTL
a no expiry
Key Store TTL
a nv no expiry

View file

@ -2,8 +2,8 @@
$ pda set a@lsalpha 1
$ pda set b@lsbeta 2
$ pda ls lsalpha --format tsv
Key Value TTL
a 1 no expiry
Key Store Value TTL
a lsalpha 1 no expiry
$ pda ls lsbeta --format tsv
Key Value TTL
b 2 no expiry
Key Store Value TTL
b lsbeta 2 no expiry

View file

@ -3,13 +3,13 @@ $ fecho tmpval hello world
$ pda set greeting@vt < tmpval
$ pda set number@vt 42
$ pda ls vt --value "**world**" --format tsv
Key Value TTL
greeting hello world (..1 more chars) no expiry
Key Store Value TTL
greeting vt hello world (..1 more chars) no expiry
$ pda ls vt --value "**https**" --format tsv
Key Value TTL
url https://example.com no expiry
Key Store Value TTL
url vt https://example.com no expiry
$ pda ls vt --value "*" --format tsv
Key Value TTL
number 42 no expiry
Key Store Value TTL
number vt 42 no expiry
$ pda ls vt --value "**nomatch**" --> FAIL
FAIL cannot ls '@vt': no matches for value pattern '**nomatch**'

View file

@ -3,6 +3,6 @@ $ fecho tmpval hello world
$ pda set greeting@vm < tmpval
$ pda set number@vm 42
$ pda ls vm --value "**world**" --value "42" --format tsv
Key Value TTL
greeting hello world (..1 more chars) no expiry
number 42 no expiry
Key Store Value TTL
greeting vm hello world (..1 more chars) no expiry
number vm 42 no expiry

View file

@ -6,5 +6,5 @@ bar
$ pda get x@ms2
y
$ pda ls ms2 --format tsv
Key Value TTL
x y no expiry
Key Store Value TTL
x ms2 y no expiry

View file

@ -2,10 +2,10 @@
$ pda set foo@rdd 1
$ pda set bar@rdd 2
$ pda ls rdd --format tsv
Key Value TTL
bar 2 no expiry
foo 1 no expiry
$ pda rm foo@rdd --key "*@rdd"
Key Store Value TTL
bar rdd 2 no expiry
foo rdd 1 no expiry
$ pda rm foo@rdd --key "*@rdd" -y
$ pda get bar@rdd --> FAIL
FAIL cannot get 'bar@rdd': no such key
$ pda get foo@rdd --> FAIL

View file

@ -1,7 +1,7 @@
$ pda set a1@rkg 1
$ pda set a2@rkg 2
$ pda set b1@rkg 3
$ pda rm --key "a*@rkg"
$ pda rm --key "a*@rkg" -y
$ pda get a1@rkg --> FAIL
FAIL cannot get 'a1@rkg': no such key
hint did you mean 'b1'?

View file

@ -1,7 +1,7 @@
$ pda set foo@rkm 1
$ pda set bar1@rkm 2
$ pda set bar2@rkm 3
$ pda rm foo@rkm --key "bar*@rkm"
$ pda rm foo@rkm --key "bar*@rkm" -y
$ pda get foo@rkm --> FAIL
FAIL cannot get 'foo@rkm': no such key
$ pda get bar1@rkm --> FAIL

2
testdata/root.ct vendored
View file

@ -15,7 +15,7 @@ Key commands:
copy Make a copy of a key
get Get the value of a key
identity Show or create the age encryption identity
list List the contents of a store
list List the contents of all stores
move Move a key
remove Delete one or more keys
run Get the value of a key and execute it