Improve list function with more control over options #33

Merged
12 commits merged on Mar 21, 2024
4 changes: 2 additions & 2 deletions .github/workflows/test.yaml
@@ -4,14 +4,14 @@ jobs:
test:
strategy:
matrix:
go-version: [1.21.x]
go-version: [1.21.x, 1.22.x]
os: [ubuntu-latest]
runs-on: ${{ matrix.os }}

steps:
- name: Install Linux packages
if: matrix.os == 'ubuntu-latest'
run: sudo add-apt-repository ppa:jonathonf/zfs && sudo apt-get update && sudo apt install -y --no-install-recommends zfsutils-linux
run: sudo apt install -y --no-install-recommends zfsutils-linux

- name: Install Go
uses: actions/setup-go@v4
11 changes: 9 additions & 2 deletions Dockerfile
@@ -1,7 +1,14 @@
FROM ubuntu:bionic AS golang-zfs
FROM ubuntu:jammy AS golang-zfs

# Install zfsutils
RUN apt-get update && apt-get install -y --no-install-recommends \
RUN apt-get update \
&& apt-get install -y \
software-properties-common \
sudo \
&& add-apt-repository ppa:longsleep/golang-backports \
&& apt-get update \
&& apt-get install -y --no-install-recommends \
golang-go \
zfsutils-linux \
&& rm -rf /var/lib/apt/lists/*

143 changes: 83 additions & 60 deletions dataset.go
@@ -3,6 +3,7 @@ package zfs
import (
"fmt"
"strconv"
"strings"
)

// DatasetType is the zfs dataset type
@@ -26,93 +27,115 @@ type Dataset struct {
Type DatasetType `json:"Type"`
Origin string `json:"Origin"`
Used uint64 `json:"Used"`
Avail uint64 `json:"Avail"`
Available uint64 `json:"Available"`
Mountpoint string `json:"Mountpoint"`
Compression string `json:"Compression"`
Written uint64 `json:"Written"`
Volsize uint64 `json:"Volsize"`
Logicalused uint64 `json:"Logicalused"`
Usedbydataset uint64 `json:"Usedbydataset"`
Quota uint64 `json:"Quota"`
Refquota uint64 `json:"Refquota"`
Referenced uint64 `json:"Referenced"`
ExtraProps map[string]string `json:"ExtraProps"`
}

func datasetFromFields(fields, extraProps []string) (*Dataset, error) {
if len(fields) != len(dsPropList)+len(extraProps) {
return nil, fmt.Errorf("output invalid: %d fields where %d were expected", len(fields), len(dsPropList)+len(extraProps))
}
const (
nameField = iota
propertyField
valueField
)

d := &Dataset{
Name: fields[0],
Type: DatasetType(fields[1]),
func readDatasets(output [][]string, extraProps []string) ([]Dataset, error) {
multiple := len(dsPropList) + len(extraProps)
if len(output)%multiple != 0 {
return nil, fmt.Errorf("output invalid: %d lines where a multiple of %d was expected: %s",
len(output), multiple, strings.Join(output[0], " "),
)
}
fields = setString(&d.Origin, fields[2:])

fields, err := setUint(&d.Used, fields)
if err != nil {
return nil, err
}
fields, err = setUint(&d.Avail, fields)
if err != nil {
return nil, err
}
fields = setString(&d.Mountpoint, fields)
fields = setString(&d.Compression, fields)
fields, err = setUint(&d.Volsize, fields)
if err != nil {
return nil, err
}
fields, err = setUint(&d.Quota, fields)
if err != nil {
return nil, err
}
fields, err = setUint(&d.Referenced, fields)
if err != nil {
return nil, err
}
fields, err = setUint(&d.Written, fields)
if err != nil {
return nil, err
}
fields, err = setUint(&d.Logicalused, fields)
if err != nil {
return nil, err
}
fields, err = setUint(&d.Usedbydataset, fields)
if err != nil {
return nil, err
}
count := len(output) / multiple
curDataset := 0
datasets := make([]Dataset, count)
for i, fields := range output {
if len(fields) != 3 {
return nil, fmt.Errorf("output contains line with %d fields: %s", len(fields), strings.Join(fields, " "))
}

if i > 0 && fields[nameField] != datasets[curDataset].Name {
curDataset++
}

ds := &datasets[curDataset]
ds.Name = fields[nameField]
// Init extra props if needed
if ds.ExtraProps == nil {
ds.ExtraProps = make(map[string]string, len(extraProps))
}

d.ExtraProps = make(map[string]string, len(extraProps))
for i, field := range extraProps {
d.ExtraProps[field] = fields[i]
prop := fields[propertyField]
val := fields[valueField]

var setError error
switch prop {
case PropertyName:
ds.Name = val
case PropertyType:
ds.Type = DatasetType(val)
case PropertyOrigin:
ds.Origin = setString(val)
case PropertyUsed:
ds.Used, setError = setUint(val)
case PropertyAvailable:
ds.Available, setError = setUint(val)
case PropertyMountPoint:
ds.Mountpoint = setString(val)
case PropertyCompression:
ds.Compression = setString(val)
case PropertyWritten:
ds.Written, setError = setUint(val)
case PropertyVolSize:
ds.Volsize, setError = setUint(val)
case PropertyLogicalUsed:
ds.Logicalused, setError = setUint(val)
case PropertyUsedByDataset:
ds.Usedbydataset, setError = setUint(val)
case PropertyQuota:
ds.Quota, setError = setUint(val)
case PropertyRefQuota:
ds.Refquota, setError = setUint(val)
case PropertyReferenced:
ds.Referenced, setError = setUint(val)
default:
if val == PropertyUnset {
ds.ExtraProps[prop] = ""
continue
}
ds.ExtraProps[prop] = val
}
if setError != nil {
return nil, fmt.Errorf("error in dataset %d (%s) field %s [%s]: %w", curDataset, ds.Name, prop, val, setError)
}
}

return d, nil
return datasets, nil
}

func setString(field *string, values []string) []string {
val, values := values[0], values[1:]
func setString(val string) string {
if val == PropertyUnset {
return values
return ""
}
*field = val
return values
return val
}

func setUint(field *uint64, values []string) ([]string, error) {
var val string
val, values = values[0], values[1:]
func setUint(val string) (uint64, error) {
if val == PropertyUnset {
return values, nil
return 0, nil
}

v, err := strconv.ParseUint(val, 10, 64)
if err != nil {
return values, err
return 0, err
}

*field = v
return values, nil
return v, nil
}
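
Note on the new parsing flow: readDatasets now consumes one property per input row in name/property/value form (the layout produced by `zfs get -H -o name,property,value`), grouping consecutive rows into datasets, instead of one fixed-width row per dataset. A minimal sketch of feeding it, assuming it sits in package zfs and uses a naive whitespace splitter (the package's real splitter, splitOutput in the tests below, may differ) with one extra user property:

// naiveSplit is a hypothetical stand-in for the package's output splitter:
// it turns each raw output line into a [name, property, value] row.
// It assumes property values contain no embedded whitespace.
func naiveSplit(raw string) [][]string {
	var rows [][]string
	for _, line := range strings.Split(strings.TrimSpace(raw), "\n") {
		if fields := strings.Fields(line); len(fields) > 0 {
			rows = append(rows, fields)
		}
	}
	return rows
}

// parseExample hands the split rows to readDatasets together with the
// extra (user) properties that were requested.
func parseExample(raw string) ([]Dataset, error) {
	return readDatasets(naiveSplit(raw), []string{"nl.test:hiephoi"})
}
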
81 changes: 81 additions & 0 deletions dataset_test.go
@@ -0,0 +1,81 @@
package zfs

import (
"github.com/stretchr/testify/require"
"testing"
)

func Test_readDatasets(t *testing.T) {
in := splitOutput(testInput)

const prop1 = "nl.test:hiephoi"
const prop2 = "nl.test:eigenschap"

ds, err := readDatasets(in, []string{prop1, prop2})
require.NoError(t, err)
require.Len(t, ds, 3)
require.Equal(t, ds[0].Name, "testpool/ds0")
require.Equal(t, ds[1].Name, "testpool/ds1")
require.Equal(t, ds[2].Name, "testpool/ds10")

for i := range ds {
require.Equal(t, "", ds[i].Origin)
require.NotEmpty(t, ds[i].Name)
require.NotEmpty(t, ds[i].Mountpoint)
require.NotZero(t, ds[i].Referenced)
require.NotZero(t, ds[i].Used)
require.NotZero(t, ds[i].Available)
require.Equal(t, "42", ds[i].ExtraProps[prop1])
require.Equal(t, "ja", ds[i].ExtraProps[prop2])
}
}

const testInput = `testpool/ds0 name testpool/ds0
testpool/ds0 type filesystem
testpool/ds0 origin -
testpool/ds0 used 196416
testpool/ds0 available 186368146928528
testpool/ds0 mountpoint none
testpool/ds0 compression off
testpool/ds0 volsize -
testpool/ds0 quota 0
testpool/ds0 refquota 0
testpool/ds0 referenced 196416
testpool/ds0 written 196416
testpool/ds0 logicalused 43520
testpool/ds0 usedbydataset 196416
testpool/ds0 nl.test:hiephoi 42
testpool/ds0 nl.test:eigenschap ja
testpool/ds1 name testpool/ds1
testpool/ds1 type filesystem
testpool/ds1 origin -
testpool/ds1 used 196416
testpool/ds1 available 186368146928528
testpool/ds1 mountpoint none
testpool/ds1 compression off
testpool/ds1 volsize -
testpool/ds1 quota 0
testpool/ds1 refquota 0
testpool/ds1 referenced 196416
testpool/ds1 written 196416
testpool/ds1 logicalused 43520
testpool/ds1 usedbydataset 196416
testpool/ds1 nl.test:hiephoi 42
testpool/ds1 nl.test:eigenschap ja
testpool/ds10 name testpool/ds10
testpool/ds10 type filesystem
testpool/ds10 origin -
testpool/ds10 used 196416
testpool/ds10 available 186368146928528
testpool/ds10 mountpoint none
testpool/ds10 compression off
testpool/ds10 volsize -
testpool/ds10 quota 0
testpool/ds10 refquota 0
testpool/ds10 referenced 196416
testpool/ds10 written 196416
testpool/ds10 logicalused 43520
testpool/ds10 usedbydataset 196416
testpool/ds10 nl.test:hiephoi 42
testpool/ds10 nl.test:eigenschap ja
`
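
The fixture approximates what an invocation along the lines of `zfs get -r -H -p -o name,property,value <properties> testpool` would print (the actual command construction is not part of this diff). A hedged sketch of assembling such an argument list for the properties exercised here, again assuming package zfs where strings is already imported:

// buildGetArgs is a hypothetical illustration of a zfs invocation whose
// output has the same shape as testInput above: -H drops headers, -p prints
// raw numeric values, and -o name,property,value emits one property per line.
func buildGetArgs(parent string, extraProps []string) []string {
	props := []string{
		"name", "type", "origin", "used", "available", "mountpoint",
		"compression", "volsize", "quota", "refquota", "referenced",
		"written", "logicalused", "usedbydataset",
	}
	props = append(props, extraProps...)
	return []string{
		"get", "-r", "-H", "-p",
		"-o", "name,property,value",
		strings.Join(props, ","),
		parent,
	}
}

With that layout, each of the three datasets in the fixture contributes 16 rows (14 native properties plus the two nl.test extras), so the 48-row input satisfies the multiple-of check in readDatasets, assuming dsPropList covers the same 14 native properties handled in its switch.
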
2 changes: 1 addition & 1 deletion http/client_test.go
@@ -60,7 +60,7 @@ func TestClient_Send(t *testing.T) {
ds, err = zfs.GetDataset(context.Background(), fullNewFs)
require.NoError(t, err)

snaps, err := ds.Snapshots(context.Background())
snaps, err := ds.Snapshots(context.Background(), zfs.ListOptions{})
require.NoError(t, err)
require.Len(t, snaps, 2)
require.Equal(t, fullNewFs+"@lala1", snaps[0].Name)
2 changes: 1 addition & 1 deletion http/http.go
@@ -53,7 +53,7 @@ func (h *HTTP) init() error {
h.logger.Info("zfs.http.init: Serving", "host", h.config.Host, "port", h.config.Port)
h.httpServer = &http.Server{
Handler: h.router,
BaseContext: func(listener net.Listener) context.Context {
BaseContext: func(_ net.Listener) context.Context {
return h.ctx
},
}
11 changes: 9 additions & 2 deletions http/http_handlers.go
@@ -79,7 +79,11 @@ func zfsExtraProperties(req *http.Request) []string {
}

func (h *HTTP) handleListFilesystems(w http.ResponseWriter, req *http.Request, _ httprouter.Params, logger *slog.Logger) {
list, err := zfs.Filesystems(req.Context(), h.config.ParentDataset, zfsExtraProperties(req)...)
list, err := zfs.ListFilesystems(req.Context(), zfs.ListOptions{
ParentDataset: h.config.ParentDataset,
ExtraProperties: zfsExtraProperties(req),
Recursive: true,
})
switch {
case errors.Is(err, zfs.ErrDatasetNotFound):
logger.Info("zfs.http.handleListFilesystems: Parent dataset not found", "error", err)
@@ -178,7 +182,10 @@ func (h *HTTP) handleListSnapshots(w http.ResponseWriter, req *http.Request, ps
return
}

list, err := zfs.Snapshots(req.Context(), fmt.Sprintf("%s/%s", h.config.ParentDataset, filesystem), zfsExtraProperties(req)...)
list, err := zfs.ListSnapshots(req.Context(), zfs.ListOptions{
ParentDataset: fmt.Sprintf("%s/%s", h.config.ParentDataset, filesystem),
ExtraProperties: zfsExtraProperties(req),
})
switch {
case errors.Is(err, zfs.ErrDatasetNotFound):
logger.Info("zfs.http.handleListSnapshots: Filesystem not found", "error", err, "filesystem", filesystem)
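
For callers migrating from the old variadic helpers (zfs.Filesystems, zfs.Snapshots, ds.Snapshots(ctx)), the handler changes above suggest usage now goes through a single options struct. A hedged sketch using only the ListOptions fields visible in this diff; the module path in the import is an assumption:

package main

import (
	"context"
	"fmt"

	zfs "github.com/vansante/go-zfs" // assumed module path
)

func main() {
	ctx := context.Background()

	// List filesystems under a parent dataset, requesting one extra user property.
	filesystems, err := zfs.ListFilesystems(ctx, zfs.ListOptions{
		ParentDataset:   "testpool",
		ExtraProperties: []string{"nl.test:hiephoi"},
		Recursive:       true,
	})
	if err != nil {
		panic(err)
	}

	for _, fs := range filesystems {
		// Snapshot listing on a dataset now also takes a ListOptions value.
		snaps, err := fs.Snapshots(ctx, zfs.ListOptions{})
		if err != nil {
			panic(err)
		}
		fmt.Println(fs.Name, fs.ExtraProps["nl.test:hiephoi"], len(snaps))
	}
}
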