Compare commits
66 Commits
731a805a9e
b90624d7dd
2a75cf9b4e
d52cc16c88
112b572dc2
0c0a85f3bb
2761535e12
1a72589f6f
df0b5d66d7
c5f1cffca5
7eed20e35f
e71cab485d
568fe7f717
d78c6d5a62
c774b610d4
1e00f8f1cc
28eed4fd12
c9ac9d9dd6
a4927a8915
029bdd849d
8d21ae79c0
4278763cdb
a6165b3a8c
0eacd26615
e926bb301c
961c455d59
4f601087e1
8b18e444a3
62c52e749e
463ff1a7e4
3787944141
584a8e4e2a
cdf41938b0
3d02037e3a
6e2b4af336
9843f8a9c3
4b8a0a0d18
338c89504f
c07eafd087
206e75c597
7a61e5126c
0925cbd8ac
866f17b86f
15e99c3658
6400871749
829009250c
d2fe03affc
febc162491
d9f29af446
003e04eaf2
723517eb57
5dfd1e3787
b9bea671fa
bcced6bc10
9b4b1c3320
7e3268a9c8
8128877bcb
adec7cfe07
5b6cf42e10
001baa29bf
7bf5ce967f
4293c887f8
dc586f61fc
4538e7d46b
13a3ba36bd
9c7e647f9e
.gitlab-ci.yml

@@ -1,7 +1,10 @@
---

# global settings
image: alpine:latest
image: alpine:edge

variables:
  GOFLAGS: "-buildvcs=false"

stages:
  - lint

@@ -21,23 +24,13 @@ stages:
    - merge_requests
    - tags

# device documentation
gofmt linting:
  stage: lint
  allow_failure: true
  <<: *only-default
  before_script:
    - apk -q add go
  script:
    - .gitlab-ci/check_gofmt.sh

build:
  stage: build
  <<: *only-default
  before_script:
    - apk -q add go
    - apk -q add go staticcheck make
  script:
    - go build -v
    - go test ./...
    - make test
    - make
  artifacts:
    expire_in: 1 week
.gitlab-ci/check_gofmt.sh (deleted)

@@ -1,11 +0,0 @@
#!/bin/sh

files="$(gofmt -l .)"

[ -z "$files" ] && exit 0

# run gofmt to print out the diff of what needs to be changed
gofmt -d -e .

exit 1
Makefile (new file, 54 lines)

@@ -0,0 +1,54 @@
.POSIX:
.SUFFIXES:

PREFIX?=/usr/local
BINDIR?=$(PREFIX)/sbin
SHAREDIR?=$(PREFIX)/share
GO?=go
GOFLAGS?=
LDFLAGS+=-s -w
RM?=rm -f
GOTEST=go test -count=1 -race

GOSRC!=find * -name '*.go'
GOSRC+=go.mod go.sum

all: postmarketos-mkinitfs

postmarketos-mkinitfs: $(GOSRC)
	$(GO) build $(GOFLAGS) -ldflags "$(LDFLAGS)" -o postmarketos-mkinitfs

.PHONY: fmt
fmt:
	gofmt -w .

test:
	@if [ `gofmt -l . | wc -l` -ne 0 ]; then \
		gofmt -d .; \
		echo "ERROR: source files need reformatting with gofmt"; \
		exit 1; \
	fi
	@staticcheck ./...

	@$(GOTEST) ./...

clean:
	$(RM) postmarketos-mkinitfs

install: $(DOCS) postmarketos-mkinitfs
	install -Dm755 postmarketos-mkinitfs -t $(DESTDIR)$(BINDIR)/
	ln -sf postmarketos-mkinitfs $(DESTDIR)$(BINDIR)/mkinitfs

.PHONY: checkinstall
checkinstall:
	test -e $(DESTDIR)$(BINDIR)/postmarketos-mkinitfs
	test -L $(DESTDIR)$(BINDIR)/mkinitfs

RMDIR_IF_EMPTY:=sh -c '! [ -d $$0 ] || ls -1qA $$0 | grep -q . || rmdir $$0'

uninstall:
	$(RM) $(DESTDIR)$(BINDIR)/postmarketos-mkinitfs
	$(RM) $(DESTDIR)$(BINDIR)/mkinitfs
	${RMDIR_IF_EMPTY} $(DESTDIR)$(BINDIR)

.PHONY: all clean install uninstall test
README.md (new file, 45 lines)

@@ -0,0 +1,45 @@
`postmarketos-mkinitfs` is a tool for generating an initramfs (and installing
it) on postmarketOS.

## Building

Building this project requires a Go compiler/toolchain and `make`:

```
$ make
```

To install locally:

```
$ make install
```

The installation prefix can be set in the generally accepted way by setting
`PREFIX`:

```
$ make PREFIX=/some/location
# make PREFIX=/some/location install
```

Other paths can be modified from the command line as well; see the top section of
the `Makefile` for more information.

Tests (functional and linting) can be executed by using the `test` make target:

```
$ make test
```

## Usage

The application uses configuration from `/etc/deviceinfo`, and does not support
any other options at runtime. It can be run simply by executing:

```
$ postmarketos-mkinitfs
```

For historical reasons, a symlink from `mkinitfs` to `postmarketos-mkinitfs` is
also installed by the makefile's `install` target.
go.mod (2 changed lines)

@@ -3,8 +3,6 @@ module gitlab.com/postmarketOS/postmarketos-mkinitfs
go 1.16

require (
	git.sr.ht/~sircmpwn/getopt v0.0.0-20201218204720-9961a9c6298f
	github.com/BurntSushi/toml v0.4.0
	github.com/cavaliercoder/go-cpio v0.0.0-20180626203310-925f9528c45e
	github.com/klauspost/compress v1.13.3 // indirect
	github.com/klauspost/pgzip v1.2.5
go.sum (21 changed lines)

@@ -1,30 +1,9 @@
git.sr.ht/~sircmpwn/getopt v0.0.0-20201218204720-9961a9c6298f h1:f5axCdaRzGDCihN3o1Lq0ydn0VlkhY+11G0JOyY5qss=
git.sr.ht/~sircmpwn/getopt v0.0.0-20201218204720-9961a9c6298f/go.mod h1:wMEGFFFNuPos7vHmWXfszqImLppbc0wEhh6JBfJIUgw=
github.com/BurntSushi/toml v0.3.2-0.20210614224209-34d990aa228d/go.mod h1:2QZjSXA5e+XyFeCAxxtL8Z4StYUsTquL8ODGPR3C3MA=
github.com/BurntSushi/toml v0.3.2-0.20210621044154-20a94d639b8e/go.mod h1:t4zg8TkHfP16Vb3x4WKIw7zVYMit5QFtPEO8lOWxzTg=
github.com/BurntSushi/toml v0.3.2-0.20210624061728-01bfc69d1057/go.mod h1:NMj2lD5LfMqcE0w8tnqOsH6944oaqpI1974lrIwerfE=
github.com/BurntSushi/toml v0.3.2-0.20210704081116-ccff24ee4463/go.mod h1:EkRrMiQQmfxK6kIldz3QbPlhmVkrjW1RDJUnbDqGYvc=
github.com/BurntSushi/toml v0.4.0 h1:qD/r9AL67srjW6O3fcSKZDsXqzBNX6ieSRywr2hRrdE=
github.com/BurntSushi/toml v0.4.0/go.mod h1:wtejDu7Q0FhCWAo2aXkywSJyYFg01EDTKozLNCz2JBA=
github.com/BurntSushi/toml-test v0.1.1-0.20210620192437-de01089bbf76/go.mod h1:P/PrhmZ37t5llHfDuiouWXtFgqOoQ12SAh9j6EjrBR4=
github.com/BurntSushi/toml-test v0.1.1-0.20210624055653-1f6389604dc6/go.mod h1:UAIt+Eo8itMZAAgImXkPGDMYsT1SsJkVdB5TuONl86A=
github.com/BurntSushi/toml-test v0.1.1-0.20210704062846-269931e74e3f/go.mod h1:fnFWrIwqgHsEjVsW3RYCJmDo86oq9eiJ9u6bnqhtm2g=
github.com/BurntSushi/toml-test v0.1.1-0.20210723065233-facb9eccd4da h1:2QGUaQtV2u8V1USTI883wo+uxtZFAiZ4TCNupHJ98IU=
github.com/BurntSushi/toml-test v0.1.1-0.20210723065233-facb9eccd4da/go.mod h1:ve9Q/RRu2vHi42LocPLNvagxuUJh993/95b18bw/Nws=
github.com/cavaliercoder/go-cpio v0.0.0-20180626203310-925f9528c45e h1:hHg27A0RSSp2Om9lubZpiMgVbvn39bsUmW9U5h0twqc=
github.com/cavaliercoder/go-cpio v0.0.0-20180626203310-925f9528c45e/go.mod h1:oDpT4efm8tSYHXV5tHSdRvBet/b/QzxZ+XyyPehvm3A=
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/klauspost/compress v1.13.3 h1:BtAvtV1+h0YwSVwWoYXMREPpYu9VzTJ9QDI1TEg/iQQ=
github.com/klauspost/compress v1.13.3/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg=
github.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE=
github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
zgo.at/zli v0.0.0-20210619044753-e7020a328e59/go.mod h1:HLAc12TjNGT+VRXr76JnsNE3pbooQtwKWhX+RlDjQ2Y=
main_test.go (56 changed lines)

@@ -1,8 +1,10 @@
// Copyright 2021 Clayton Craft <clayton@craftyguy.net>
// SPDX-License-Identifier: GPL-3.0-or-later

package main

import (
	"strings"
	"testing"
)

@@ -24,3 +26,57 @@ func TestStripExts(t *testing.T) {
		}
	}
}

func stringSlicesEqual(a []string, b []string) bool {
	if len(a) != len(b) {
		return false
	}
	for i, v := range a {
		if v != b[i] {
			return false
		}
	}
	return true
}

var testModuleDep string = `
kernel/sound/soc/codecs/snd-soc-msm8916-digital.ko:
kernel/net/sched/act_ipt.ko.xz: kernel/net/netfilter/x_tables.ko.xz
kernel/drivers/watchdog/watchdog.ko.xz:
kernel/drivers/usb/serial/ir-usb.ko.xz: kernel/drivers/usb/serial/usbserial.ko.xz
kernel/drivers/gpu/drm/scheduler/gpu-sched.ko.xz:
kernel/drivers/hid/hid-alps.ko.xz:
kernel/net/netfilter/xt_u32.ko.xz: kernel/net/netfilter/x_tables.ko.xz
kernel/net/netfilter/xt_sctp.ko.xz: kernel/net/netfilter/x_tables.ko.xz
kernel/drivers/hwmon/gl518sm.ko.xz:
kernel/drivers/watchdog/dw_wdt.ko.xz: kernel/drivers/watchdog/watchdog.ko.xz
kernel/net/bluetooth/hidp/hidp.ko.xz: kernel/net/bluetooth/bluetooth.ko.xz kernel/net/rfkill/rfkill.ko.xz kernel/crypto/ecdh_generic.ko.xz kernel/crypto/ecc.ko.xz
kernel/fs/nls/nls_iso8859-1.ko.xz:
kernel/net/vmw_vsock/vmw_vsock_virtio_transport.ko.xz: kernel/net/vmw_vsock/vmw_vsock_virtio_transport_common.ko.xz kernel/drivers/virtio/virtio.ko.xz kernel/drivers/virtio/virtio_ring.ko.xz kernel/net/vmw_vsock/vsock.ko.xz
kernel/drivers/gpu/drm/panfrost/panfrost.ko.xz: kernel/drivers/gpu/drm/scheduler/gpu-sched.ko.xz
kernel/drivers/gpu/drm/msm/msm.ko: kernel/drivers/gpu/drm/drm_kms_helper.ko
`

func TestGetModuleDeps(t *testing.T) {
	tables := []struct {
		in       string
		expected []string
	}{
		{"nls-iso8859-1", []string{"kernel/fs/nls/nls_iso8859-1.ko.xz"}},
		{"gpu_sched", []string{"kernel/drivers/gpu/drm/scheduler/gpu-sched.ko.xz"}},
		{"dw-wdt", []string{"kernel/drivers/watchdog/dw_wdt.ko.xz",
			"kernel/drivers/watchdog/watchdog.ko.xz"}},
		{"gl518sm", []string{"kernel/drivers/hwmon/gl518sm.ko.xz"}},
		{"msm", []string{"kernel/drivers/gpu/drm/msm/msm.ko",
			"kernel/drivers/gpu/drm/drm_kms_helper.ko"}},
	}
	for _, table := range tables {
		out, err := getModuleDeps(table.in, strings.NewReader(testModuleDep))
		if err != nil {
			t.Errorf("unexpected error with input: %q, error: %q", table.expected, err)
		}
		if !stringSlicesEqual(out, table.expected) {
			t.Errorf("Expected: %q, got: %q", table.expected, out)
		}
	}
}
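The new test above exercises `getModuleDeps`, whose implementation is not part of this compare view. As a rough, hypothetical sketch of the kind of modules.dep lookup being tested (the helper names and the '-'/'_' normalization here are assumptions, not the project's actual code):

```
package main

import (
	"bufio"
	"fmt"
	"io"
	"path/filepath"
	"strings"
)

// stripExts drops every extension from a file name, e.g. "dw_wdt.ko.xz" -> "dw_wdt".
func stripExts(name string) string {
	if i := strings.Index(name, "."); i >= 0 {
		return name[:i]
	}
	return name
}

// normalize treats '-' and '_' as equivalent, the way module names are
// commonly matched against modules.dep entries.
func normalize(name string) string {
	return strings.ReplaceAll(name, "-", "_")
}

// lookupModuleDeps scans modules.dep-style input and returns the matching
// module path followed by its listed dependencies.
func lookupModuleDeps(name string, r io.Reader) ([]string, error) {
	want := normalize(name)
	s := bufio.NewScanner(r)
	for s.Scan() {
		line := strings.TrimSpace(s.Text())
		parts := strings.SplitN(line, ":", 2)
		if len(parts) != 2 {
			continue
		}
		mod := parts[0]
		if normalize(stripExts(filepath.Base(mod))) != want {
			continue
		}
		deps := []string{mod}
		deps = append(deps, strings.Fields(parts[1])...)
		return deps, nil
	}
	if err := s.Err(); err != nil {
		return nil, err
	}
	return nil, fmt.Errorf("module not found: %s", name)
}

func main() {
	deps, _ := lookupModuleDeps("dw-wdt", strings.NewReader(
		"kernel/drivers/watchdog/dw_wdt.ko.xz: kernel/drivers/watchdog/watchdog.ko.xz\n"))
	fmt.Println(deps) // [kernel/drivers/watchdog/dw_wdt.ko.xz kernel/drivers/watchdog/watchdog.ko.xz]
}
```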
pkgs/archive/archive.go

@@ -1,26 +1,28 @@
// Copyright 2021 Clayton Craft <clayton@craftyguy.net>
// SPDX-License-Identifier: GPL-3.0-or-later

package archive

import (
	"bytes"
	"compress/flate"
	"crypto/sha256"
	"encoding/hex"
	"github.com/cavaliercoder/go-cpio"
	"github.com/klauspost/pgzip"
	"gitlab.com/postmarketOS/postmarketos-mkinitfs/pkgs/misc"
	"fmt"
	"io"
	"log"
	"os"
	"os/exec"
	"path/filepath"
	"sort"
	"strings"
	"sync"
	"syscall"

	"github.com/cavaliercoder/go-cpio"
	"github.com/klauspost/pgzip"
	"gitlab.com/postmarketOS/postmarketos-mkinitfs/pkgs/misc"
)

type Archive struct {
	Dirs       misc.StringSet
	Files      misc.StringSet
	items      archiveItems
	cpioWriter *cpio.Writer
	buf        *bytes.Buffer
}

@@ -29,166 +31,184 @@ func New() (*Archive, error) {
	buf := new(bytes.Buffer)
	archive := &Archive{
		cpioWriter: cpio.NewWriter(buf),
		Files:      make(misc.StringSet),
		Dirs:       make(misc.StringSet),
		buf:        buf,
	}

	return archive, nil
}

type archiveItem struct {
	sourcePath string
	header     *cpio.Header
}

type archiveItems struct {
	items []archiveItem
	sync.RWMutex
}

// Adds the given item to the archiveItems, only if it doesn't already exist in
// the list. The items are kept sorted in ascending order.
func (a *archiveItems) Add(item archiveItem) {
	a.Lock()
	defer a.Unlock()

	if len(a.items) < 1 {
		// empty list
		a.items = append(a.items, item)
		return
	}

	// find existing item, or index of where new item should go
	i := sort.Search(len(a.items), func(i int) bool {
		return strings.Compare(item.header.Name, a.items[i].header.Name) <= 0
	})

	if i >= len(a.items) {
		// doesn't exist in list, but would be at the very end
		a.items = append(a.items, item)
		return
	}

	if strings.Compare(a.items[i].header.Name, item.header.Name) == 0 {
		// already in list
		return
	}

	// grow list by 1, shift right at index, and insert new string at index
	a.items = append(a.items, archiveItem{})
	copy(a.items[i+1:], a.items[i:])
	a.items[i] = item
}

// iterate through items and send each one over the returned channel
func (a *archiveItems) IterItems() <-chan archiveItem {
	ch := make(chan archiveItem)
	go func() {
		a.RLock()
		defer a.RUnlock()

		for _, item := range a.items {
			ch <- item
		}
		close(ch)
	}()
	return ch
}

func (archive *Archive) Write(path string, mode os.FileMode) error {
	if err := archive.writeCpio(); err != nil {
		return err
	}

	if err := archive.cpioWriter.Close(); err != nil {
		return err
		return fmt.Errorf("archive.Write: error closing archive: %w", err)
	}

	// Write archive to path
	if err := archive.writeCompressed(path, mode); err != nil {
		log.Print("Unable to write archive to location: ", path)
		return err
	}

	// test the archive to make sure it's valid
	if err := test(path); err != nil {
		log.Print("Verification of archive failed!")
		return err
		return fmt.Errorf("unable to write archive to location %q: %w", path, err)
	}

	if err := os.Chmod(path, mode); err != nil {
		return err
		return fmt.Errorf("unable to chmod %q to %s: %w", path, mode, err)
	}

	return nil
}

func checksum(path string) (string, error) {
	var sum string

	buf := make([]byte, 64*1024)
	sha256 := sha256.New()
	fd, err := os.Open(path)
	defer fd.Close()

	if err != nil {
		log.Print("Unable to checksum: ", path)
		return sum, err
	}

	// Read file in chunks
	for {
		bytes, err := fd.Read(buf)
		if bytes > 0 {
			_, err := sha256.Write(buf[:bytes])
			if err != nil {
				log.Print("Unable to checksum: ", path)
				return sum, err
			}
		}

		if err == io.EOF {
			break
// Adds the given items in the map to the archive. The map format is {source path:dest path}.
// Internally this just calls AddItem on each key,value pair in the map.
func (archive *Archive) AddItems(paths map[string]string) error {
	for s, d := range paths {
		if err := archive.AddItem(s, d); err != nil {
			return err
		}
	}
	sum = hex.EncodeToString(sha256.Sum(nil))
	return sum, nil
	return nil
}

func (archive *Archive) AddFile(file string, dest string) error {
// Adds the given file or directory at "source" to the archive at "dest"
func (archive *Archive) AddItem(source string, dest string) error {

	sourceStat, err := os.Lstat(source)
	if err != nil {
		e, ok := err.(*os.PathError)
		if e.Err == syscall.ENOENT && ok {
			// doesn't exist in current filesystem, assume it's a new directory
			return archive.addDir(dest)
		}
		return fmt.Errorf("AddItem: failed to get stat for %q: %w", source, err)
	}

	if sourceStat.Mode()&os.ModeDir != 0 {
		return archive.addDir(dest)
	}

	return archive.addFile(source, dest)
}

func (archive *Archive) addFile(source string, dest string) error {
	if err := archive.addDir(filepath.Dir(dest)); err != nil {
		return err
	}

	if archive.Files[file] {
		// Already written to cpio
		return nil
	}

	fileStat, err := os.Lstat(file)
	sourceStat, err := os.Lstat(source)
	if err != nil {
		log.Print("AddFile: failed to stat file: ", file)
		log.Print("addFile: failed to stat file: ", source)
		return err
	}

	// Symlink: write symlink to archive then set 'file' to link target
	if fileStat.Mode()&os.ModeSymlink != 0 {
	if sourceStat.Mode()&os.ModeSymlink != 0 {
		// log.Printf("File %q is a symlink", file)
		target, err := os.Readlink(file)
		target, err := os.Readlink(source)
		if err != nil {
			log.Print("AddFile: failed to get symlink target: ", file)
			log.Print("addFile: failed to get symlink target: ", source)
			return err
		}

		destFilename := strings.TrimPrefix(dest, "/")
		hdr := &cpio.Header{
			Name:     destFilename,
			Linkname: target,
			Mode:     0644 | cpio.ModeSymlink,
			Size:     int64(len(target)),
			// Checksum: 1,
		}
		if err := archive.cpioWriter.WriteHeader(hdr); err != nil {
			return err
		}
		if _, err = archive.cpioWriter.Write([]byte(target)); err != nil {
			return err
		}

		archive.Files[file] = true
		archive.items.Add(archiveItem{
			sourcePath: source,
			header: &cpio.Header{
				Name:     destFilename,
				Linkname: target,
				Mode:     0644 | cpio.ModeSymlink,
				Size:     int64(len(target)),
				// Checksum: 1,
			},
		})

		if filepath.Dir(target) == "." {
			target = filepath.Join(filepath.Dir(file), target)
			target = filepath.Join(filepath.Dir(source), target)
		}
		// make sure target is an absolute path
		if !filepath.IsAbs(target) {
			target, err = misc.RelativeSymlinkTargetToDir(target, filepath.Dir(file))
			target, err = misc.RelativeSymlinkTargetToDir(target, filepath.Dir(source))
			if err != nil {
				return err
			}
		}
		// TODO: add verbose mode, print stuff like this:
		// log.Printf("symlink: %q, target: %q", file, target)
		// write symlink target
		err = archive.AddFile(target, target)
		err = archive.addFile(target, target)
		return err
	}

	// log.Printf("writing file: %q", file)

	fd, err := os.Open(file)
	if err != nil {
		return err
	}
	defer fd.Close()

	destFilename := strings.TrimPrefix(dest, "/")
	hdr := &cpio.Header{
		Name: destFilename,
		Mode: cpio.FileMode(fileStat.Mode().Perm()),
		Size: fileStat.Size(),
		// Checksum: 1,
	}
	if err := archive.cpioWriter.WriteHeader(hdr); err != nil {
		return err
	}

	if _, err = io.Copy(archive.cpioWriter, fd); err != nil {
		return err
	}

	archive.Files[file] = true

	return nil
}

// Use busybox gzip to test archive
func test(path string) error {
	cmd := exec.Command("busybox", "gzip", "-t", path)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	if err := cmd.Run(); err != nil {
		log.Print("'boot-deploy' command failed: ")
		return err
	}
	archive.items.Add(archiveItem{
		sourcePath: source,
		header: &cpio.Header{
			Name: destFilename,
			Mode: cpio.FileMode(sourceStat.Mode().Perm()),
			Size: sourceStat.Size(),
			// Checksum: 1,
		},
	})

	return nil
}

@@ -226,29 +246,48 @@ func (archive *Archive) writeCompressed(path string, mode os.FileMode) error {
}

func (archive *Archive) writeCpio() error {
	// Write any dirs added explicitly
	for dir := range archive.Dirs {
		archive.addDir(dir)
	// having a transient function for actually adding files to the archive
	// allows the deferred fd.close to run after every copy and prevent having
	// tons of open file handles until the copying is all done
	copyToArchive := func(source string, header *cpio.Header) error {

		if err := archive.cpioWriter.WriteHeader(header); err != nil {
			return fmt.Errorf("archive.writeCpio: unable to write header: %w", err)
		}

		// don't copy actual dirs into the archive, writing the header is enough
		if !header.Mode.IsDir() {
			if header.Mode.IsRegular() {
				fd, err := os.Open(source)
				if err != nil {
					return fmt.Errorf("archive.writeCpio: uname to open file %q, %w", source, err)
				}
				defer fd.Close()
				if _, err := io.Copy(archive.cpioWriter, fd); err != nil {
					return fmt.Errorf("archive.writeCpio: unable to write out archive: %w", err)
				}
			} else if header.Linkname != "" {
				// the contents of a symlink is just need the link name
				if _, err := archive.cpioWriter.Write([]byte(header.Linkname)); err != nil {
					return fmt.Errorf("archive.writeCpio: unable to write out symlink: %w", err)
				}
			} else {
				return fmt.Errorf("archive.writeCpio: unknown type for file: %s", source)
			}
		}

		return nil
	}

	// Write files and any missing parent dirs
	for file, imported := range archive.Files {
		if imported {
			continue
		}
		if err := archive.AddFile(file, file); err != nil {
	for i := range archive.items.IterItems() {
		if err := copyToArchive(i.sourcePath, i.header); err != nil {
			return err
		}
	}

	return nil
}

func (archive *Archive) addDir(dir string) error {
	if archive.Dirs[dir] {
		// Already imported
		return nil
	}
	if dir == "/" {
		dir = "."
	}

@@ -256,19 +295,13 @@ func (archive *Archive) addDir(dir string) error {
	subdirs := strings.Split(strings.TrimPrefix(dir, "/"), "/")
	for i, subdir := range subdirs {
		path := filepath.Join(strings.Join(subdirs[:i], "/"), subdir)
		if archive.Dirs[path] {
			// Subdir already imported
			continue
		}
		err := archive.cpioWriter.WriteHeader(&cpio.Header{
			Name: path,
			Mode: cpio.ModeDir | 0755,
		archive.items.Add(archiveItem{
			sourcePath: path,
			header: &cpio.Header{
				Name: path,
				Mode: cpio.ModeDir | 0755,
			},
		})
		if err != nil {
			return err
		}
		archive.Dirs[path] = true
		// log.Print("wrote dir: ", path)
	}

	return nil
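Taken together, these hunks rework the archive package: instead of writing cpio entries immediately in `AddFile`, `AddItem`/`AddItems` collect de-duplicated, sorted `archiveItem` entries and `writeCpio` streams them out in one pass, opening and closing each source file inside a per-item closure so deferred closes run promptly. A minimal, hypothetical caller of the exported API as it appears after this change (the file paths are illustrative only, and error handling is reduced to `log.Fatal`):

```
package main

import (
	"log"

	"gitlab.com/postmarketOS/postmarketos-mkinitfs/pkgs/archive"
)

func main() {
	// Collect items first; nothing is written to the cpio archive yet.
	a, err := archive.New()
	if err != nil {
		log.Fatal(err)
	}

	// Map of source path on the host -> destination path inside the initramfs.
	items := map[string]string{
		"/bin/busybox":    "/bin/busybox",
		"/etc/deviceinfo": "/etc/deviceinfo",
	}
	if err := a.AddItems(items); err != nil {
		log.Fatal(err)
	}

	// Write streams the sorted items through the cpio writer, compresses the
	// result, and chmods the output file.
	if err := a.Write("/tmp/initramfs", 0644); err != nil {
		log.Fatal(err)
	}
}
```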
pkgs/archive/archive_test.go (new file, 189 lines)

@@ -0,0 +1,189 @@
// Copyright 2022 Clayton Craft <clayton@craftyguy.net>
// SPDX-License-Identifier: GPL-3.0-or-later

package archive

import (
	"reflect"
	"testing"

	"github.com/cavaliercoder/go-cpio"
)

func TestArchiveItemsAdd(t *testing.T) {
	subtests := []struct {
		name     string
		inItems  []archiveItem
		inItem   archiveItem
		expected []archiveItem
	}{
		{
			name:    "empty list",
			inItems: []archiveItem{},
			inItem: archiveItem{
				sourcePath: "/foo/bar",
				header:     &cpio.Header{Name: "/foo/bar"},
			},
			expected: []archiveItem{
				{
					sourcePath: "/foo/bar",
					header:     &cpio.Header{Name: "/foo/bar"},
				},
			},
		},
		{
			name: "already exists",
			inItems: []archiveItem{
				{
					sourcePath: "/bazz/bar",
					header:     &cpio.Header{Name: "/bazz/bar"},
				},
				{
					sourcePath: "/foo",
					header:     &cpio.Header{Name: "/foo"},
				},
				{
					sourcePath: "/foo/bar",
					header:     &cpio.Header{Name: "/foo/bar"},
				},
			},
			inItem: archiveItem{
				sourcePath: "/foo",
				header:     &cpio.Header{Name: "/foo"},
			},
			expected: []archiveItem{
				{
					sourcePath: "/bazz/bar",
					header:     &cpio.Header{Name: "/bazz/bar"},
				},
				{
					sourcePath: "/foo",
					header:     &cpio.Header{Name: "/foo"},
				},
				{
					sourcePath: "/foo/bar",
					header:     &cpio.Header{Name: "/foo/bar"},
				},
			},
		},
		{
			name: "add new",
			inItems: []archiveItem{
				{
					sourcePath: "/bazz/bar",
					header:     &cpio.Header{Name: "/bazz/bar"},
				},
				{
					sourcePath: "/foo",
					header:     &cpio.Header{Name: "/foo"},
				},
				{
					sourcePath: "/foo/bar",
					header:     &cpio.Header{Name: "/foo/bar"},
				},
				{
					sourcePath: "/foo/bar1",
					header:     &cpio.Header{Name: "/foo/bar1"},
				},
			},
			inItem: archiveItem{
				sourcePath: "/foo/bar0",
				header:     &cpio.Header{Name: "/foo/bar0"},
			},
			expected: []archiveItem{
				{
					sourcePath: "/bazz/bar",
					header:     &cpio.Header{Name: "/bazz/bar"},
				},
				{
					sourcePath: "/foo",
					header:     &cpio.Header{Name: "/foo"},
				},
				{
					sourcePath: "/foo/bar",
					header:     &cpio.Header{Name: "/foo/bar"},
				},
				{
					sourcePath: "/foo/bar0",
					header:     &cpio.Header{Name: "/foo/bar0"},
				},
				{
					sourcePath: "/foo/bar1",
					header:     &cpio.Header{Name: "/foo/bar1"},
				},
			},
		},
		{
			name: "add new at beginning",
			inItems: []archiveItem{
				{
					sourcePath: "/foo",
					header:     &cpio.Header{Name: "/foo"},
				},
				{
					sourcePath: "/foo/bar",
					header:     &cpio.Header{Name: "/foo/bar"},
				},
			},
			inItem: archiveItem{
				sourcePath: "/bazz/bar",
				header:     &cpio.Header{Name: "/bazz/bar"},
			},
			expected: []archiveItem{
				{
					sourcePath: "/bazz/bar",
					header:     &cpio.Header{Name: "/bazz/bar"},
				},
				{
					sourcePath: "/foo",
					header:     &cpio.Header{Name: "/foo"},
				},
				{
					sourcePath: "/foo/bar",
					header:     &cpio.Header{Name: "/foo/bar"},
				},
			},
		},
		{
			name: "add new at end",
			inItems: []archiveItem{
				{
					sourcePath: "/bazz/bar",
					header:     &cpio.Header{Name: "/bazz/bar"},
				},
				{
					sourcePath: "/foo",
					header:     &cpio.Header{Name: "/foo"},
				},
			},
			inItem: archiveItem{
				sourcePath: "/zzz/bazz",
				header:     &cpio.Header{Name: "/zzz/bazz"},
			},
			expected: []archiveItem{
				{
					sourcePath: "/bazz/bar",
					header:     &cpio.Header{Name: "/bazz/bar"},
				},
				{
					sourcePath: "/foo",
					header:     &cpio.Header{Name: "/foo"},
				},
				{
					sourcePath: "/zzz/bazz",
					header:     &cpio.Header{Name: "/zzz/bazz"},
				},
			},
		},
	}

	for _, st := range subtests {
		t.Run(st.name, func(t *testing.T) {
			a := archiveItems{items: st.inItems}
			a.Add(st.inItem)
			if !reflect.DeepEqual(st.expected, a.items) {
				t.Fatal("expected:", st.expected, " got: ", a.items)
			}
		})
	}
}
pkgs/deviceinfo/deviceinfo.go

@@ -1,53 +1,130 @@
// Copyright 2021 Clayton Craft <clayton@craftyguy.net>
// SPDX-License-Identifier: GPL-3.0-or-later

package deviceinfo

import (
	"errors"
	"github.com/BurntSushi/toml"
	"bufio"
	"fmt"
	"io"
	"log"
	"os"
	"reflect"
	"strings"
)

// Note: fields must be exported (start with capital letter)
// https://github.com/BurntSushi/toml/issues/121
type DeviceInfo struct {
	Deviceinfo_append_dtb string
	Deviceinfo_arch string
	Deviceinfo_bootimg_append_seandroidenforce string
	Deviceinfo_bootimg_blobpack string
	Deviceinfo_bootimg_dtb_second string
	Deviceinfo_bootimg_mtk_mkimage string
	Deviceinfo_bootimg_pxa string
	Deviceinfo_bootimg_qcdt string
	Deviceinfo_dtb string
	Deviceinfo_flash_offset_base string
	Deviceinfo_flash_offset_kernel string
	Deviceinfo_flash_offset_ramdisk string
	Deviceinfo_flash_offset_second string
	Deviceinfo_flash_offset_tags string
	Deviceinfo_flash_pagesize string
	Deviceinfo_generate_bootimg string
	Deviceinfo_generate_legacy_uboot_initfs string
	Deviceinfo_mesa_driver string
	Deviceinfo_mkinitfs_postprocess string
	Deviceinfo_initfs_compression string
	Deviceinfo_kernel_cmdline string
	Deviceinfo_legacy_uboot_load_address string
	Deviceinfo_modules_initfs string
	Deviceinfo_flash_kernel_on_update string
	AppendDtb string
	Arch string
	UbootBoardname string
	BootimgAppendSEAndroidEnforce string
	BootimgBlobpack string
	BootimgDtbSecond string
	BootimgMtkMkimage string
	BootimgPxa string
	BootimgQcdt string
	Dtb string
	FlashKernelOnUpdate string
	FlashOffsetBase string
	FlashOffsetKernel string
	FlashOffsetRamdisk string
	FlashOffsetSecond string
	FlashOffsetTags string
	FlashPagesize string
	GenerateBootimg string
	GenerateLegacyUbootInitfs string
	InitfsCompression string
	KernelCmdline string
	LegacyUbootLoadAddress string
	MesaDriver string
	MkinitfsPostprocess string
	ModulesInitfs string
}

func ReadDeviceinfo() (DeviceInfo, error) {
	file := "/etc/deviceinfo"
func ReadDeviceinfo(file string) (DeviceInfo, error) {
	var deviceinfo DeviceInfo

	_, err := os.Stat(file)
	fd, err := os.Open(file)
	if err != nil {
		return deviceinfo, errors.New("Unable to find deviceinfo: " + file)
	}

	if _, err := toml.DecodeFile(file, &deviceinfo); err != nil {
		return deviceinfo, err
	}
	defer fd.Close()

	if err := unmarshal(fd, &deviceinfo); err != nil {
		return deviceinfo, err
	}

	return deviceinfo, nil
}

// Unmarshals a deviceinfo into a DeviceInfo struct
func unmarshal(r io.Reader, devinfo *DeviceInfo) error {
	s := bufio.NewScanner(r)
	for s.Scan() {
		line := s.Text()
		if strings.HasPrefix(line, "#") {
			continue
		}

		// line isn't setting anything, so just ignore it
		if !strings.Contains(line, "=") {
			continue
		}

		// sometimes line has a comment at the end after setting an option
		line = strings.SplitN(line, "#", 2)[0]
		line = strings.TrimSpace(line)

		// must support having '=' in the value (e.g. kernel cmdline)
		parts := strings.SplitN(line, "=", 2)
		if len(parts) != 2 {
			return fmt.Errorf("error parsing deviceinfo line, invalid format: %s", line)
		}

		name, val := parts[0], parts[1]
		val = strings.ReplaceAll(val, "\"", "")

		if name == "deviceinfo_format_version" && val != "0" {
			return fmt.Errorf("deviceinfo format version %q is not supported", val)
		}

		fieldName := nameToField(name)

		if fieldName == "" {
			return fmt.Errorf("error parsing deviceinfo line, invalid format: %s", line)
		}

		field := reflect.ValueOf(devinfo).Elem().FieldByName(fieldName)
		if !field.IsValid() {
			// an option that meets the deviceinfo "specification", but isn't
			// one we care about in this module
			continue
		}
		field.SetString(val)
	}
	if err := s.Err(); err != nil {
		log.Print("unable to parse deviceinfo: ", err)
		return err
	}

	return nil
}

// Convert string into the string format used for DeviceInfo fields.
// Note: does not test that the resulting field name is a valid field in the
// DeviceInfo struct!
func nameToField(name string) string {
	var field string
	parts := strings.Split(name, "_")
	for _, p := range parts {
		if p == "deviceinfo" {
			continue
		}
		if len(p) < 1 {
			continue
		}
		field = field + strings.ToUpper(p[:1]) + p[1:]
	}

	return field
}
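With this change the deviceinfo package drops the TOML dependency and parses the shell-style `/etc/deviceinfo` format directly: `deviceinfo_foo_bar="value"` lines are mapped onto `FooBar` struct fields via reflection, comments and quoting are stripped, and `ReadDeviceinfo` now takes the file path as an argument instead of hardcoding it. A small, hypothetical caller showing the reworked API (the path and the fields used are examples only):

```
package main

import (
	"fmt"
	"log"
	"strings"

	"gitlab.com/postmarketOS/postmarketos-mkinitfs/pkgs/deviceinfo"
)

func main() {
	// The path is now passed in explicitly rather than hardcoded.
	devinfo, err := deviceinfo.ReadDeviceinfo("/etc/deviceinfo")
	if err != nil {
		log.Fatal(err)
	}

	// deviceinfo_modules_initfs="panfrost msm ..." ends up in ModulesInitfs,
	// with quotes and trailing comments already stripped by the parser.
	for _, mod := range strings.Fields(devinfo.ModulesInitfs) {
		fmt.Println("initfs module:", mod)
	}
	fmt.Println("kernel cmdline:", devinfo.KernelCmdline)
}
```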
pkgs/deviceinfo/deviceinfo_test.go (new file, 81 lines)

@@ -0,0 +1,81 @@
// Copyright 2021 Clayton Craft <clayton@craftyguy.net>
// SPDX-License-Identifier: GPL-3.0-or-later

package deviceinfo

import (
	"fmt"
	"reflect"
	"strings"
	"testing"
)

// Test conversion of name to DeviceInfo struct field format
func TestNameToField(t *testing.T) {
	tables := []struct {
		in       string
		expected string
	}{
		{"deviceinfo_dtb", "Dtb"},
		{"dtb", "Dtb"},
		{"deviceinfo_modules_initfs", "ModulesInitfs"},
		{"modules_initfs", "ModulesInitfs"},
		{"deviceinfo_modules_initfs___", "ModulesInitfs"},
	}

	for _, table := range tables {
		out := nameToField(table.in)
		if out != table.expected {
			t.Errorf("expected: %q, got: %q", table.expected, out)
		}
	}
}

// Test unmarshalling with lines in deviceinfo
func TestUnmarshal(t *testing.T) {
	tables := []struct {
		// field is just used for reflection within the test, so it must be a
		// valid DeviceInfo field
		field    string
		in       string
		expected string
	}{
		{"ModulesInitfs", "deviceinfo_modules_initfs=\"panfrost foo bar bazz\"\n", "panfrost foo bar bazz"},
		{"ModulesInitfs", "deviceinfo_modules_initfs=\"panfrost foo bar bazz\"", "panfrost foo bar bazz"},
		// line with multiple '='
		{"KernelCmdline",
			"deviceinfo_kernel_cmdline=\"PMOS_NO_OUTPUT_REDIRECT fw_devlink=off nvme_core.default_ps_max_latency_us=5500 pcie_aspm.policy=performance\"\n",
			"PMOS_NO_OUTPUT_REDIRECT fw_devlink=off nvme_core.default_ps_max_latency_us=5500 pcie_aspm.policy=performance"},
		// empty option
		{"ModulesInitfs", "deviceinfo_modules_initfs=\"\"\n", ""},
		{"Dtb", "deviceinfo_dtb=\"freescale/imx8mq-librem5-r2 freescale/imx8mq-librem5-r3 freescale/imx8mq-librem5-r4\"\n",
			"freescale/imx8mq-librem5-r2 freescale/imx8mq-librem5-r3 freescale/imx8mq-librem5-r4"},
		// valid deviceinfo line, just not used in this module
		{"", "deviceinfo_codename=\"pine64-pinebookpro\"", ""},
		// line with comment at the end
		{"MesaDriver", "deviceinfo_mesa_driver=\"panfrost\" # this is a nice driver", "panfrost"},
		{"", "# this is a comment!\n", ""},
		// empty lines are fine
		{"", "", ""},
		// line with whitepace characters only
		{"", " \t \n\r", ""},
	}
	var d DeviceInfo
	for _, table := range tables {
		testName := fmt.Sprintf("unmarshal::'%s':", strings.ReplaceAll(table.in, "\n", "\\n"))
		if err := unmarshal(strings.NewReader(table.in), &d); err != nil {
			t.Errorf("%s received an unexpected err: ", err)
		}

		// Check against expected value
		field := reflect.ValueOf(&d).Elem().FieldByName(table.field)
		out := ""
		if table.field != "" {
			out = field.String()
		}
		if out != table.expected {
			t.Errorf("%s expected: %q, got: %q", testName, table.expected, out)
		}
	}

}
pkgs/misc/misc.go

@@ -1,5 +1,6 @@
// Copyright 2021 Clayton Craft <clayton@craftyguy.net>
// Copyright 2022 Clayton Craft <clayton@craftyguy.net>
// SPDX-License-Identifier: GPL-3.0-or-later

package misc

import (

@@ -9,8 +10,6 @@ import (
	"path/filepath"
)

type StringSet map[string]bool

// Converts a relative symlink target path (e.g. ../../lib/foo.so), that is
// absolute path
func RelativeSymlinkTargetToDir(symPath string, dir string) (string, error) {

@@ -47,3 +46,31 @@ func FreeSpace(path string) (uint64, error) {
	size := stat.Bavail * uint64(stat.Bsize)
	return size, nil
}

// Merge the contents of "b" into "a", overwriting any previously existing keys
// in "a"
func Merge(a map[string]string, b map[string]string) {
	for k, v := range b {
		a[k] = v
	}
}

// Removes duplicate entries from the given string slice and returns a slice
// with the unique values
func RemoveDuplicates(in []string) (out []string) {
	// use a map to "remove" duplicates. the value in the map is totally
	// irrelevant
	outMap := make(map[string]bool)
	for _, s := range in {
		if ok := outMap[s]; !ok {
			outMap[s] = true
		}
	}

	out = make([]string, 0, len(outMap))
	for k := range outMap {
		out = append(out, k)
	}

	return
}
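The two helpers added here combine file maps and de-duplicate string lists for callers elsewhere in the tree. A short, hypothetical usage sketch of the exported functions as shown above (the keys and values are examples only):

```
package main

import (
	"fmt"

	"gitlab.com/postmarketOS/postmarketos-mkinitfs/pkgs/misc"
)

func main() {
	// Merge overwrites existing keys in the first map with values from the second.
	files := map[string]string{"/bin/busybox": "/bin/busybox"}
	misc.Merge(files, map[string]string{"/etc/deviceinfo": "/etc/deviceinfo"})
	fmt.Println(len(files)) // 2

	// RemoveDuplicates returns only the unique values; order is not preserved,
	// since the implementation round-trips through a map.
	fmt.Println(misc.RemoveDuplicates([]string{"a", "b", "a"}))
}
```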
pkgs/misc/misc_test.go (new file, 125 lines)

@@ -0,0 +1,125 @@
// Copyright 2022 Clayton Craft <clayton@craftyguy.net>
// SPDX-License-Identifier: GPL-3.0-or-later

package misc

import (
	"reflect"
	"sort"
	"testing"
)

func TestMerge(t *testing.T) {
	subtests := []struct {
		name     string
		inA      map[string]string
		inB      map[string]string
		expected map[string]string
	}{
		{
			name: "empty B",
			inA: map[string]string{
				"foo":    "bar",
				"banana": "airplane",
			},
			inB: map[string]string{},
			expected: map[string]string{
				"foo":    "bar",
				"banana": "airplane",
			},
		},
		{
			name: "empty A",
			inA:  map[string]string{},
			inB: map[string]string{
				"foo":    "bar",
				"banana": "airplane",
			},
			expected: map[string]string{
				"foo":    "bar",
				"banana": "airplane",
			},
		},
		{
			name: "both populated, some duplicates",
			inA: map[string]string{
				"bar":    "bazz",
				"banana": "yellow",
				"guava":  "green",
			},
			inB: map[string]string{
				"foo":    "bar",
				"banana": "airplane",
			},
			expected: map[string]string{
				"foo":    "bar",
				"guava":  "green",
				"banana": "airplane",
				"bar":    "bazz",
			},
		},
	}

	for _, st := range subtests {
		t.Run(st.name, func(t *testing.T) {
			out := st.inA
			Merge(out, st.inB)
			if !reflect.DeepEqual(st.expected, out) {
				t.Fatalf("expected: %q, got: %q\n", st.expected, out)
			}
		})
	}
}

func TestRemoveDuplicates(t *testing.T) {
	subtests := []struct {
		name     string
		in       []string
		expected []string
	}{
		{
			name: "no duplicates",
			in: []string{
				"foo",
				"bar",
				"banana",
				"airplane",
			},
			expected: []string{
				"foo",
				"bar",
				"banana",
				"airplane",
			},
		},
		{
			name: "all duplicates",
			in: []string{
				"foo",
				"foo",
				"foo",
				"foo",
			},
			expected: []string{
				"foo",
			},
		},
		{
			name:     "empty",
			in:       []string{},
			expected: []string{},
		},
	}

	for _, st := range subtests {
		t.Run(st.name, func(t *testing.T) {
			// note: sorting to make comparison easier later
			sort.Strings(st.expected)
			out := RemoveDuplicates(st.in)
			sort.Strings(out)
			if !reflect.DeepEqual(st.expected, out) {
				t.Fatalf("expected: %q, got: %q\n", st.expected, out)
			}
		})
	}
}