godoc-static/main.go

// Command godoc-static generates static Go documentation.
package main

import (
"archive/zip"
"bytes"
"errors"
"flag"
"fmt"
"go/build"
"io/ioutil"
"log"
"net/http"
"os"
"os/exec"
"os/signal"
"path"
"path/filepath"
"sort"
"strings"
"time"

"github.com/PuerkitoBio/goquery"
"github.com/yuin/goldmark"
"github.com/yuin/goldmark/extension"
gmhtml "github.com/yuin/goldmark/renderer/html"
"golang.org/x/mod/modfile"
"golang.org/x/net/html"
)
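// Command-line options and state shared across the documentation build.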
var (
listenAddress string
siteName string
siteDescription string
siteDescriptionFile string
siteFooter string
siteFooterFile string
siteDestination string
siteZip string
disableFilter bool
linkIndex bool
excludePackages string
quiet bool
verbose bool

goPath string
godoc *exec.Cmd
godocEnv []string
godocStartDir string
outZip *zip.Writer

scanIncomplete = []byte(`<span class="alert" style="font-size:120%">Scan is not yet complete.`)
)
func main() {
log.SetPrefix("")
log.SetFlags(0)

flag.StringVar(&listenAddress, "listen-address", "localhost:9001", "address for godoc to listen on while scraping pages")
flag.StringVar(&siteName, "site-name", "Documentation", "site name")
flag.StringVar(&siteDescription, "site-description", "", "site description (markdown-enabled)")
flag.StringVar(&siteDescriptionFile, "site-description-file", "", "path to markdown file containing site description")
flag.StringVar(&siteFooter, "site-footer", "", "site footer (markdown-enabled)")
flag.StringVar(&siteFooterFile, "site-footer-file", "", "path to markdown file containing site footer")
flag.StringVar(&siteDestination, "destination", "", "path to write site HTML")
flag.StringVar(&siteZip, "zip", "docs.zip", "name of site ZIP file (blank to disable)")
flag.BoolVar(&disableFilter, "disable-filter", false, `do not exclude packages named "testdata", "internal", or "cmd"`)
flag.BoolVar(&linkIndex, "link-index", false, "set link targets to index.html instead of folder")
flag.StringVar(&excludePackages, "exclude", "", "list of packages to exclude from index")
flag.BoolVar(&quiet, "quiet", false, "disable all logging except errors")
flag.BoolVar(&verbose, "verbose", false, "enable verbose logging")
flag.Parse()

// -quiet can only take effect after the flags have been parsed.
if quiet {
log.SetOutput(ioutil.Discard)
}

err := run()
if godoc != nil && godoc.Process != nil {
godoc.Process.Kill()
}
if err != nil {
log.Fatal(err)
}
}
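// skipPackages are path elements filtered out of the index unless -disable-filter is set.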
var skipPackages = []string{"cmd", "internal", "testdata"}
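// filterPkgsWithExcludes drops any package listed in -exclude (or nested below one) and,
// unless -disable-filter is set, any package with a path element named in skipPackages.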
func filterPkgsWithExcludes(pkgs []string) []string {
excludePackagesSplit := strings.Split(excludePackages, " ")
var tmpPkgs []string
PACKAGEINDEX:
for _, pkg := range pkgs {
for _, excludePackage := range excludePackagesSplit {
if pkg == excludePackage || strings.HasPrefix(pkg, excludePackage+"/") {
continue PACKAGEINDEX
}
}
if !disableFilter {
for _, skipPackage := range skipPackages {
if strings.Contains("/"+pkg+"/", "/"+skipPackage+"/") {
continue PACKAGEINDEX
}
}
}
tmpPkgs = append(tmpPkgs, pkg)
}
return tmpPkgs
}
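// getTmpDir returns the system temporary directory, creating it if it does not exist.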
func getTmpDir() string {
tmpDir := os.TempDir()
if _, err := os.Stat(tmpDir); os.IsNotExist(err) {
mkDirErr := os.MkdirAll(tmpDir, 0755)
if _, err = os.Stat(tmpDir); os.IsNotExist(err) {
log.Fatalf("failed to create missing temporary directory %s: %s", tmpDir, mkDirErr)
}
}
return tmpDir
}
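// writeFile writes buf to fileDir/fileName under the site destination and, when a ZIP
// archive is being produced, adds the same file to the archive.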
func writeFile(buf *bytes.Buffer, fileDir string, fileName string) error {
if outZip != nil {
fn := fileDir
if fn != "" {
fn += "/"
}
fn += fileName
outZipFile, err := outZip.Create(fn)
if err != nil {
return fmt.Errorf("failed to create file %s in zip archive: %s", fn, err)
}
_, err = outZipFile.Write(buf.Bytes())
if err != nil {
return fmt.Errorf("failed to write file %s into zip archive: %s", fn, err)
}
}
return ioutil.WriteFile(path.Join(siteDestination, fileDir, fileName), buf.Bytes(), 0755)
}
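// startGodoc (re)starts the godoc HTTP server rooted at dir on listenAddress, killing any
// previously started instance first. Starting is retried a few times before giving up.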
func startGodoc(dir string) {
if dir == godocStartDir {
return // Already started
}
godocStartDir = dir
if godoc != nil {
godoc.Process.Kill()
godoc.Wait()
}
const attempts = 3
for i := 0; i < attempts; i++ {
godoc = exec.Command("godoc", fmt.Sprintf("-http=%s", listenAddress))
godoc.Env = godocEnv
if dir == "" {
godoc.Dir = os.TempDir()
} else {
godoc.Dir = dir
}
godoc.Stdin = nil
godoc.Stdout = nil
godoc.Stderr = nil
setDeathSignal(godoc)
err := godoc.Start()
if err != nil {
if i == attempts-1 {
log.Fatalf("failed to execute godoc: %s\ninstall godoc by running: go get golang.org/x/tools/cmd/godoc\nthen ensure ~/go/bin is in $PATH", err)
}
time.Sleep(10 * time.Millisecond)
} else {
return
}
}
}
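// run generates the static site: it renders the description and footer, starts godoc,
// scrapes package and source pages, copies style.css and writes the index.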
func run() error {
var (
timeStarted = time.Now()
buf bytes.Buffer
err error
)
if siteDestination == "" {
return errors.New("--destination must be set")
}
if siteDescriptionFile != "" {
siteDescriptionBytes, err := ioutil.ReadFile(siteDescriptionFile)
if err != nil {
return fmt.Errorf("failed to read site description file %s: %s", siteDescriptionFile, err)
}
siteDescription = string(siteDescriptionBytes)
}
if siteDescription != "" {
markdown := goldmark.New(
goldmark.WithRendererOptions(
gmhtml.WithUnsafe(),
),
goldmark.WithExtensions(
extension.NewLinkify(),
),
)
buf.Reset()
err := markdown.Convert([]byte(siteDescription), &buf)
if err != nil {
return fmt.Errorf("failed to render site description markdown: %s", err)
}
siteDescription = buf.String()
}
if siteFooterFile != "" {
siteFooterBytes, err := ioutil.ReadFile(siteFooterFile)
if err != nil {
return fmt.Errorf("failed to read site footer file %s: %s", siteFooterFile, err)
}
siteFooter = string(siteFooterBytes)
}
if siteFooter != "" {
markdown := goldmark.New(
goldmark.WithRendererOptions(
gmhtml.WithUnsafe(),
),
goldmark.WithExtensions(
extension.NewLinkify(),
),
)
buf.Reset()
err := markdown.Convert([]byte(siteFooter), &buf)
if err != nil {
return fmt.Errorf("failed to render site footer markdown: %s", err)
}
siteFooter = buf.String()
}
if siteZip != "" {
zipPath := filepath.Join(siteDestination, siteZip)
err = os.MkdirAll(filepath.Dir(zipPath), 0755)
if err != nil {
return fmt.Errorf("failed to make directory %s: %s", filepath.Dir(zipPath), err)
}
outZipFile, err := os.Create(zipPath)
if err != nil {
return fmt.Errorf("failed to create zip file %s: %s", zipPath, err)
}
defer outZipFile.Close()
outZip = zip.NewWriter(outZipFile)
defer outZip.Close()
}
goPath = os.Getenv("GOPATH")
if goPath == "" {
goPath = build.Default.GOPATH
}
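// Build the environment used for godoc and the go list invocations below, forcing GO111MODULE=auto.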
godocEnv = make([]string, len(os.Environ()))
copy(godocEnv, os.Environ())
for i, e := range godocEnv {
if strings.HasPrefix(e, "GO111MODULE=") {
godocEnv[i] = ""
}
}
godocEnv = append(godocEnv, "GO111MODULE=auto")
godocStartDir = "-" // Trigger initial start
startGodoc("")
c := make(chan os.Signal, 1)
signal.Notify(c, os.Interrupt)
go func() {
<-c
godoc.Process.Kill()
os.Exit(1)
}()
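// Resolve the packages to document: those named on the command line, or every package visible to go list when none are given.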
pkgs := flag.Args()
if len(pkgs) == 0 || (len(pkgs) == 1 && pkgs[0] == "") {
buf.Reset()
cmd := exec.Command("go", "list", "...")
cmd.Env = godocEnv
cmd.Dir = os.TempDir()
cmd.Stdout = &buf
setDeathSignal(cmd)
err = cmd.Run()
if err != nil {
return fmt.Errorf("failed to list system packages: %s", err)
}
pkgs = strings.Split(strings.TrimSpace(buf.String()), "\n")
// Include the 'builtin' package as well, if not already present.
hasBuiltin := false
for _, pkg := range pkgs {
if pkg == "builtin" {
hasBuiltin = true
break
}
}
if !hasBuiltin {
pkgs = append(pkgs, "builtin")
}
}
var newPkgs []string
pkgPaths := make(map[string]string)
for _, pkg := range pkgs {
if strings.TrimSpace(pkg) == "" {
continue
}
var suppliedPath bool
dir := ""
if _, err := os.Stat(pkg); !os.IsNotExist(err) {
dir = pkg
modFileData, err := ioutil.ReadFile(path.Join(dir, "go.mod"))
if err != nil {
log.Fatalf("failed to read mod file for %s: %s", pkg, err)
}
modFile, err := modfile.Parse(path.Join(dir, "go.mod"), modFileData, nil)
if err != nil {
log.Fatalf("failed to parse mod file for %s: %s", pkg, err)
}
pkg = modFile.Module.Mod.Path
suppliedPath = true
} else {
srcDir := path.Join(goPath, "src", pkg)
if _, err := os.Stat(srcDir); !os.IsNotExist(err) {
dir = srcDir
}
}
newPkgs = append(newPkgs, pkg)
buf.Reset()
search := "./..."
if dir == "" {
search = pkg
}
cmd := exec.Command("go", "list", "-find", "-f", `{{ .ImportPath }} {{ .Dir }}`, search)
cmd.Env = godocEnv
if dir == "" {
cmd.Dir = os.TempDir()
} else {
cmd.Dir = dir
}
cmd.Stdout = &buf
cmd.Stderr = &buf
setDeathSignal(cmd)
err = cmd.Run()
if err != nil {
pkgPaths[pkg] = dir
continue
}
sourceListing := strings.Split(buf.String(), "\n")
for i := range sourceListing {
firstSpace := strings.Index(sourceListing[i], " ")
if firstSpace <= 0 {
continue
}
pkg = sourceListing[i][:firstSpace]
pkgPath := sourceListing[i][firstSpace+1:]
newPkgs = append(newPkgs, pkg)
if dir == "" || strings.HasPrefix(filepath.Base(pkgPath), ".") {
continue
}
if suppliedPath {
pkgPaths[pkg] = dir
} else {
pkgPaths[pkg] = pkgPath
}
}
buf.Reset()
}
pkgs = uniqueStrings(newPkgs)
if len(pkgs) == 0 {
return errors.New("failed to generate docs: provide the name of at least one package to generate documentation for")
}
filterPkgs := pkgs
for _, pkg := range pkgs {
subPkgs := strings.Split(pkg, "/")
for i := range subPkgs {
pkgs = append(pkgs, strings.Join(subPkgs[0:i+1], "/"))
}
}
pkgs = filterPkgsWithExcludes(uniqueStrings(pkgs))
sort.Slice(pkgs, func(i, j int) bool {
return strings.ToLower(pkgs[i]) < strings.ToLower(pkgs[j])
})
if !disableFilter {
filterPkgs = pkgs
}
done := make(chan error)
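// Scrape each package's documentation page from the local godoc server and write it beneath the destination directory.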
go func() {
var (
res *http.Response
doc *goquery.Document
err error
)
for _, pkg := range filterPkgs {
if verbose {
log.Printf("Copying %s documentation...", pkg)
}
startGodoc(pkgPaths[pkg])
// Rely on timeout to break loop
for {
res, err = http.Get(fmt.Sprintf("http://%s/pkg/%s/", listenAddress, pkg))
if err == nil {
2020-09-10 22:44:49 +00:00
body, err := ioutil.ReadAll(res.Body)
if err != nil {
done <- fmt.Errorf("failed to get page of %s: %s", pkg, err)
return
}
if bytes.Contains(body, scanIncomplete) {
time.Sleep(25 * time.Millisecond)
continue
}
// Load the HTML document
doc, err = goquery.NewDocumentFromReader(bytes.NewReader(body))
if err != nil {
done <- fmt.Errorf("failed to parse page of %s: %s", pkg, err)
return
}
break
}
}
doc.Find("title").First().SetHtml(fmt.Sprintf("%s - %s", path.Base(pkg), siteName))
updatePage(doc, relativeBasePath(pkg), siteName)
localPkgPath := path.Join(siteDestination, pkg)
err = os.MkdirAll(localPkgPath, 0755)
if err != nil {
done <- fmt.Errorf("failed to make directory %s: %s", localPkgPath, err)
return
}
buf.Reset()
err = html.Render(&buf, doc.Nodes[0])
if err != nil {
done <- fmt.Errorf("failed to render HTML: %s", err)
return
}
err = writeFile(&buf, pkg, "index.html")
if err != nil {
done <- fmt.Errorf("failed to write docs for %s: %s", pkg, err)
return
}
}
done <- nil
}()
select {
case err = <-done:
if err != nil {
return fmt.Errorf("failed to copy docs: %s", err)
}
}
// Write source files
err = os.MkdirAll(path.Join(siteDestination, "src"), 0755)
if err != nil {
return fmt.Errorf("failed to make directory src: %s", err)
}
for _, pkg := range filterPkgs {
if verbose {
log.Printf("Copying %s sources...", pkg)
}
buf.Reset()
dir := pkgPaths[pkg]
if dir == "" {
dir = getTmpDir()
}
startGodoc(pkgPaths[pkg])
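// List the package's source files; each one's godoc source page is copied alongside index.html.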
cmd := exec.Command("go", "list", "-find", "-f",
`{{ join .GoFiles "\n" }}`+"\n"+
`{{ join .CgoFiles "\n" }}`+"\n"+
`{{ join .CFiles "\n" }}`+"\n"+
`{{ join .CXXFiles "\n" }}`+"\n"+
`{{ join .MFiles "\n" }}`+"\n"+
`{{ join .HFiles "\n" }}`+"\n"+
`{{ join .FFiles "\n" }}`+"\n"+
`{{ join .SFiles "\n" }}`+"\n"+
`{{ join .SwigFiles "\n" }}`+"\n"+
`{{ join .SwigCXXFiles "\n" }}`+"\n"+
`{{ join .TestGoFiles "\n" }}`+"\n"+
`{{ join .XTestGoFiles "\n" }}`,
pkg)
cmd.Env = godocEnv
cmd.Dir = dir
cmd.Stdout = &buf
setDeathSignal(cmd)
err = cmd.Run()
if err != nil {
//return fmt.Errorf("failed to list source files of package %s: %s", pkg, err)
continue // This is expected for packages without source files
}
sourceFiles := append(strings.Split(buf.String(), "\n"), "index.html")
for _, sourceFile := range sourceFiles {
sourceFile = strings.TrimSpace(sourceFile)
if sourceFile == "" {
continue
}
// Rely on timeout to break loop
var doc *goquery.Document
for {
res, err := http.Get(fmt.Sprintf("http://%s/src/%s/%s", listenAddress, pkg, sourceFile))
if err == nil {
body, err := ioutil.ReadAll(res.Body)
if err != nil {
return fmt.Errorf("failed to get source file page %s of %s: %s", sourceFile, pkg, err)
}
if bytes.Contains(body, scanIncomplete) {
time.Sleep(25 * time.Millisecond)
continue
}
// Load the HTML document
doc, err = goquery.NewDocumentFromReader(bytes.NewReader(body))
if err != nil {
return fmt.Errorf("failed to load document from page for package %s: %s", pkg, err)
}
break
}
}
doc.Find("title").First().SetHtml(fmt.Sprintf("%s - %s", path.Base(pkg), siteName))
updatePage(doc, relativeBasePath("src/"+pkg), siteName)
doc.Find(".layout").First().Find("a").Each(func(_ int, selection *goquery.Selection) {
href := selection.AttrOr("href", "")
if !strings.HasSuffix(href, ".") && !strings.HasSuffix(href, "/") && !strings.HasSuffix(href, ".html") {
selection.SetAttr("href", href+".html")
}
})
pkgSrcPath := path.Join(siteDestination, "src", pkg)
err = os.MkdirAll(pkgSrcPath, 0755)
if err != nil {
return fmt.Errorf("failed to make directory %s: %s", pkgSrcPath, err)
}
buf.Reset()
err = html.Render(&buf, doc.Nodes[0])
if err != nil {
return fmt.Errorf("failed to render HTML: %s", err)
}
outFileName := sourceFile
if !strings.HasSuffix(outFileName, ".html") {
outFileName += ".html"
}
err = writeFile(&buf, "src/"+pkg, outFileName)
if err != nil {
return fmt.Errorf("failed to write docs for %s: %s", pkg, err)
}
}
}
// Write style.css
if verbose {
log.Println("Copying style.css...")
}
err = os.MkdirAll(path.Join(siteDestination, "lib"), 0755)
if err != nil {
return fmt.Errorf("failed to make directory lib: %s", err)
}
for {
res, err := http.Get(fmt.Sprintf("http://%s/lib/godoc/style.css", listenAddress))
if err == nil {
buf.Reset()
_, err = buf.ReadFrom(res.Body)
res.Body.Close()
if err != nil {
return fmt.Errorf("failed to get style.css: %s", err)
}
break
}
}
buf.WriteString("\n" + additionalCSS)
err = writeFile(&buf, "lib", "style.css")
if err != nil {
return fmt.Errorf("failed to write style.css: %s", err)
}
// Write index
if verbose {
log.Println("Writing index.html...")
}
err = writeIndex(&buf, pkgs, filterPkgs)
if err != nil {
return fmt.Errorf("failed to write index: %s", err)
}
if verbose {
log.Printf("Generated documentation in %s.", time.Since(timeStarted).Round(time.Second))
}
return nil
}
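// relativeBasePath returns the relative path ("../" repeated) from the page generated for p back to the site root.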
func relativeBasePath(p string) string {
var r string
if p != "" {
r += "../"
}
p = filepath.ToSlash(p)
for i := strings.Count(p, "/"); i > 0; i-- {
r += "../"
}
return r
}
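// uniqueStrings returns strSlice with duplicate entries removed, preserving order.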
func uniqueStrings(strSlice []string) []string {
keys := make(map[string]bool)
var unique []string
for _, entry := range strSlice {
if _, value := keys[entry]; !value {
keys[entry] = true
unique = append(unique, entry)
}
}
return unique
}