diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..a1a9f7b --- /dev/null +++ b/.gitignore @@ -0,0 +1,76 @@ +# File created using '.gitignore Generator' for Visual Studio Code: https://bit.ly/vscode-gig +# Created by https://www.toptal.com/developers/gitignore/api/windows,visualstudiocode,go +# Edit at https://www.toptal.com/developers/gitignore?templates=windows,visualstudiocode,go + +### Go ### +# If you prefer the allow list template instead of the deny list, see community template: +# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore +# +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +# Go workspace file +go.work + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +!.vscode/*.code-snippets + +# Local History for Visual Studio Code +.history/ + +# Built Visual Studio Code Extensions +*.vsix + +### VisualStudioCode Patch ### +# Ignore all local history of files +.history +.ionide + +### Windows ### +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# End of https://www.toptal.com/developers/gitignore/api/windows,visualstudiocode,go + +# Custom rules (everything added below won't be overriden by 'Generate .gitignore File' if you use 'Update' option) + diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..ea3147c --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,24 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + + { + "name": "Launch static/index.html", + "type": "firefox", + "request": "launch", + "reAttach": true, + "file": "${workspaceFolder}/static/index.html", + }, + { + "name": "Launch Go application", + "type": "go", + "request": "launch", + "mode": "debug", + "program": "${workspaceFolder}", + "output": "${workspaceFolder}/debug_bin" + } + ] +} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..ec71255 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,21 @@ +{ + "cSpell.words": [ + "afero", + "EXTRACTIMAGES", + "fclairamb", + "ftpserver", + "ftpserverdriver", + "ftpserverlib", + "Ingestor", + "jfif", + "Millimetres", + "neilpa", + "pdfcpu", + "Scanyonero", + "Subed", + "Typer", + "typst", + "unitless", + "xhttp" + ] +} \ No newline at end of file diff --git a/README.md b/README.md index 6371bdc..fbddf76 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,19 @@ -# OCRmyPDF runner +# Scanyonero -A very simple tool that listens for files in a directory, and runs OCRmyPDF on them. +A tool for preprocessing scanned documents before they are sent to paperless. +Simply point your scanner to the FTP server this software provides, and it will do the rest. 
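+With the defaults in [main.go](main.go), the FTP server listens on port 21 and the review web UI is served on port 8080; adjust these to your setup before deploying.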
-This is needed as paperless(-ngx) will always create a copy of the document with its built in clean up and OCR feature. -Even external pre-consumption scripts will be run on all new documents, not just files in from consumption directory. -So the solution is to have this watchdog/runner that only pre-processes scanned documents, and leaves everything else untouched. +## Features -The idea is to let it watch a directory any scanner will scan into, and then this runner will write the final pre-processed document into a directory paperless watches. +- Built in FTP server, and therefore no need for temporary files or any other Rube Goldberg-esque setups. +- Web-app where you can preview, sort, split and merge your scans before sending them to paperless. +- Can use OCRmyPDF for adding searchable text to your scanned documents. +- Can clean up documents and make them as pristine and compact as possible. ## Usage +TODO: Update README.md + 1. Install the project somewhere. 2. Edit [main.go](main.go) to use the correct paths to your scanner and paperless consumption directories. 3. Copy the [ocrmypdf-runner.service](service/linux/systemd/ocrmypdf-runner.service) into your systemd services directory (`etc/systemd/system/ocrmypdf-runner.service`). You may have to change the user in the service file. diff --git a/document/file.go b/document/file.go new file mode 100644 index 0000000..bb88e52 --- /dev/null +++ b/document/file.go @@ -0,0 +1,37 @@ +package document + +import ( + "fmt" + "log" + "os" +) + +// File contains the raw data of a file coming from a scanner. +type File struct { + Name string + Data []byte +} + +// LoadFile reads a file and returns it as a File. +func LoadFile(filename string) (File, error) { + data, err := os.ReadFile(filename) + if err != nil { + return File{}, fmt.Errorf("couldn't read file: %w", err) + } + + return File{ + Name: filename, + Data: data, + }, nil +} + +// LoadFile reads a file and returns it as a File. +// This will panic on any error. +func MustLoadFile(filename string) File { + file, err := LoadFile(filename) + if err != nil { + log.Panicf("Failed to load file: %v.", err) + } + + return file +} diff --git a/document/ingestor-rules.go b/document/ingestor-rules.go new file mode 100644 index 0000000..7b2c7fd --- /dev/null +++ b/document/ingestor-rules.go @@ -0,0 +1,56 @@ +package document + +import ( + "Scanyonero/unit" + "regexp" +) + +type IngestorRule struct { + // All entries that are non nil have to match. + Match struct { + Name *regexp.Regexp // When this regular expression matches with the filename/filepath then the rule is used. + XPixels *int // When the scanned image width in pixels matches with the given amount of pixels, the rule is used. + YPixels *int // When the scanned image height in pixels matches with the given amount of pixels, the rule is used. + } + + // All non nil entries will be applied to the document pages. + Action struct { + MediumWidth *unit.Millimeter // Sets the width of the medium. + MediumHeight *unit.Millimeter // Sets the height of the medium. + ScanOffsetX *unit.Millimeter // Offsets the scan in the medium on the x axis. + ScanOffsetY *unit.Millimeter // Offsets the scan in the medium on the y axis. + } +} + +// Apply will check and apply the rule per ingested page. +func (rule IngestorRule) Apply(ingestor Ingestor, file File, page *Page) error { + // Match. 
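+	// Every non-nil matcher below has to pass; the first mismatch leaves the page untouched and returns no error.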
+ + if rule.Match.Name != nil && !rule.Match.Name.MatchString(file.Name) { + return nil + } + imageBounds := page.Image.Bounds() + if rule.Match.XPixels != nil && *rule.Match.XPixels != imageBounds.Dx() { + return nil + } + if rule.Match.YPixels != nil && *rule.Match.YPixels != imageBounds.Dy() { + return nil + } + + // Apply actions. + + if rule.Action.MediumWidth != nil { + page.Dimensions.MediumSize.X = *rule.Action.MediumWidth + } + if rule.Action.MediumHeight != nil { + page.Dimensions.MediumSize.Y = *rule.Action.MediumHeight + } + if rule.Action.ScanOffsetX != nil { + page.Dimensions.ScanSize.Origin.X = *rule.Action.ScanOffsetX + } + if rule.Action.ScanOffsetY != nil { + page.Dimensions.ScanSize.Origin.Y = *rule.Action.ScanOffsetY + } + + return nil +} diff --git a/document/ingestor.go b/document/ingestor.go new file mode 100644 index 0000000..b1bc274 --- /dev/null +++ b/document/ingestor.go @@ -0,0 +1,138 @@ +package document + +import ( + "Scanyonero/unit" + "bytes" + "fmt" + "image" + "path/filepath" + "strings" + + "github.com/chai2010/tiff" + "github.com/pdfcpu/pdfcpu/pkg/api" + "github.com/pdfcpu/pdfcpu/pkg/pdfcpu" + "github.com/pdfcpu/pdfcpu/pkg/pdfcpu/model" + + "image/jpeg" + "image/png" +) + +// Ingestor contains all settings and rules for image/document file ingestion. +type Ingestor struct { + DefaultDPI unit.PerInch // Default/fallback dots per inch value. + + Rules []IngestorRule +} + +func (ingestor Ingestor) Ingest(file File) ([]Page, error) { + ext := filepath.Ext(file.Name) + + var pages []Page + + switch strings.ToLower(ext) { + case ".jpg", ".jpeg": + img, err := jpeg.Decode(bytes.NewReader(file.Data)) + if err != nil { + return nil, fmt.Errorf("failed to decode JPEG file: %w", err) + } + dimensions := unit.NewPageDimensionsFromDensity(img.Bounds().Dx(), img.Bounds().Dy(), ingestor.DefaultDPI, ingestor.DefaultDPI) + if tag, err := decodeJFIF(bytes.NewReader(file.Data)); err == nil { + // Get more exact density info from the file metadata. 
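+			// (JFIF density may be given in dots per inch or dots per centimetre; a unitless tag only carries a pixel aspect ratio, see jfif.go.)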
+ xDensity, yDensity := tag.Density() + dimensions = unit.NewPageDimensionsFromDensity(img.Bounds().Dx(), img.Bounds().Dy(), xDensity, yDensity) + } + pages = append(pages, Page{ + Image: img, + Dimensions: dimensions, + }) + + case ".png": + img, err := png.Decode(bytes.NewReader(file.Data)) + if err != nil { + return nil, fmt.Errorf("failed to decode PNG file: %w", err) + } + pages = append(pages, Page{ + Image: img, + Dimensions: unit.NewPageDimensionsFromDensity(img.Bounds().Dx(), img.Bounds().Dy(), ingestor.DefaultDPI, ingestor.DefaultDPI), + }) + // TODO: Read pixel density metadata from PNG file + + case ".tiff": + mm, _, err := tiff.DecodeAll(bytes.NewReader(file.Data)) + if err != nil { + return nil, fmt.Errorf("failed to decode TIFF file: %w", err) + } + for _, m := range mm { + for _, img := range m { + pages = append(pages, Page{ + Image: img, + Dimensions: unit.NewPageDimensionsFromDensity(img.Bounds().Dx(), img.Bounds().Dy(), ingestor.DefaultDPI, ingestor.DefaultDPI), + }) + // TODO: Read pixel density metadata from TIFF file + } + } + + case ".pdf": + conf := model.NewDefaultConfiguration() + conf.Cmd = model.EXTRACTIMAGES + ctx, err := api.ReadValidateAndOptimize(bytes.NewReader(file.Data), conf) + if err != nil { + return nil, fmt.Errorf("failed to read and validate PDF file: %w", err) + } + + boundaries, err := ctx.PageBoundaries(nil) + if err != nil { + return nil, fmt.Errorf("failed to get page dimensions: %w", err) + } + if len(boundaries) != ctx.PageCount { + return nil, fmt.Errorf("number of retrieved page boundaries (%d) and pages (%d) differ", len(boundaries), ctx.PageCount) + } + + for page := 1; page <= ctx.PageCount; page++ { + mm, err := pdfcpu.ExtractPageImages(ctx, page, false) + if err != nil { + return nil, fmt.Errorf("failed to extract image from page: %w", err) + } + if len(mm) == 0 { + return nil, fmt.Errorf("page %d doesn't contain any images", page) + } + if len(mm) > 1 { + return nil, fmt.Errorf("page %d contains %d images, expected 1", page, len(mm)) + } + + boundary := boundaries[page-1] + dim := boundary.Media.Rect.Dimensions().ToMillimetres() + dimX, dimY := unit.Millimeter(dim.Width), unit.Millimeter(dim.Height) + + // Decode only image of the page. 
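+				// (The checks above guarantee exactly one image per page, so this loop body runs at most once.)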
+ for _, m := range mm { + img, _, err := image.Decode(m) + if err != nil { + return nil, fmt.Errorf("failed to decode %q file: %w", ext, err) + } + + pages = append(pages, Page{ + Image: img, + Dimensions: unit.NewPageDimensionsFromLengths(dimX, dimY), + }) + + break + } + + } + + default: + return nil, fmt.Errorf("unsupported file extension %q", ext) + } + + for iPage := range pages { + page := &pages[iPage] + for i, rule := range ingestor.Rules { + if err := rule.Apply(ingestor, file, page); err != nil { + return nil, fmt.Errorf("failed to apply ingestor rule %d on page %d: %w", i, iPage, err) + } + } + } + + return pages, nil +} diff --git a/document/ingestor_test.go b/document/ingestor_test.go new file mode 100644 index 0000000..15bb07e --- /dev/null +++ b/document/ingestor_test.go @@ -0,0 +1,110 @@ +package document + +import ( + "Scanyonero/unit" + "path/filepath" + "regexp" + "testing" +) + +func TestIngestor_Ingest(t *testing.T) { + t.SkipNow() + + tests := []struct { + name string + file File + want []Page + wantErr bool + }{ + {"300 DPI Feed JPG", MustLoadFile(filepath.Join("..", "test-documents", "300 DPI Feeder.jpg")), + []Page{{ + Dimensions: unit.PageDimensions[unit.Millimeter]{ + ScanSize: unit.Rectangle[unit.Millimeter]{ + Size: unit.Vec2[unit.Millimeter]{X: 209.97, Y: 288.713}}}, + }}, false}, + {"300 DPI Feed PDF", MustLoadFile(filepath.Join("..", "test-documents", "300 DPI Feeder.pdf")), + []Page{{ + Dimensions: unit.PageDimensions[unit.Millimeter]{ + ScanSize: unit.Rectangle[unit.Millimeter]{ + Size: unit.Vec2[unit.Millimeter]{X: 209.8, Y: 288.5}}}, + }}, false}, + {"300 DPI Flat JPG", MustLoadFile(filepath.Join("..", "test-documents", "300 DPI Flatbed.jpg")), + []Page{{ + Dimensions: unit.PageDimensions[unit.Millimeter]{ + ScanSize: unit.Rectangle[unit.Millimeter]{ + Size: unit.Vec2[unit.Millimeter]{X: 203.2, Y: 290.83}}}, + }}, false}, + {"300 DPI Flat PDF", MustLoadFile(filepath.Join("..", "test-documents", "300 DPI Flatbed.pdf")), + []Page{{ + Dimensions: unit.PageDimensions[unit.Millimeter]{ + ScanSize: unit.Rectangle[unit.Millimeter]{ + Size: unit.Vec2[unit.Millimeter]{X: 203.2, Y: 290.8}}}, + }}, false}, + {"600 DPI Feed JPG", MustLoadFile(filepath.Join("..", "test-documents", "600 DPI Feeder.jpg")), + []Page{{ + Dimensions: unit.PageDimensions[unit.Millimeter]{ + ScanSize: unit.Rectangle[unit.Millimeter]{ + Size: unit.Vec2[unit.Millimeter]{X: 209.97, Y: 288.671}}}, + }}, false}, + {"600 DPI Feed PDF", MustLoadFile(filepath.Join("..", "test-documents", "600 DPI Feeder.pdf")), + []Page{{ + Dimensions: unit.PageDimensions[unit.Millimeter]{ + ScanSize: unit.Rectangle[unit.Millimeter]{ + Size: unit.Vec2[unit.Millimeter]{X: 209.8, Y: 288.0}}}, + }}, false}, + {"600 DPI Flat JPG", MustLoadFile(filepath.Join("..", "test-documents", "600 DPI Flatbed.jpg")), + []Page{{ + Dimensions: unit.PageDimensions[unit.Millimeter]{ + ScanSize: unit.Rectangle[unit.Millimeter]{ + Size: unit.Vec2[unit.Millimeter]{X: 203.88, Y: 290.872}}}, + }}, false}, + {"600 DPI Flat PDF", MustLoadFile(filepath.Join("..", "test-documents", "600 DPI Flatbed.pdf")), + []Page{{ + Dimensions: unit.PageDimensions[unit.Millimeter]{ + ScanSize: unit.Rectangle[unit.Millimeter]{ + Size: unit.Vec2[unit.Millimeter]{X: 203.7, Y: 290.8}}}, + }}, false}, + } + + ingestor := Ingestor{ + DefaultDPI: unit.PerInch(150), + Rules: []IngestorRule{{ + Match: struct { + Name *regexp.Regexp + XPixels *int + YPixels *int + }{ + Name: regexp.MustCompile(`^.*\.pdf$`), + }, + Action: struct { + MediumWidth *unit.Millimeter + 
MediumHeight *unit.Millimeter + ScanOffsetX *unit.Millimeter + ScanOffsetY *unit.Millimeter + }{ + MediumWidth: &([]unit.Millimeter{100}[0]), + }, + }}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := ingestor.Ingest(tt.file) + if (err != nil) != tt.wantErr { + t.Errorf("Ingestor.Ingest() error = %v, wantErr %v.", err, tt.wantErr) + return + } + if len(got) != len(tt.want) { + t.Errorf("Got %d pages, but want %d pages.", len(got), len(tt.want)) + return + } + for i, gotPage := range got { + wantPage := tt.want[i] + if !gotPage.Dimensions.ScanSize.Size.EqualWithPrecision(wantPage.Dimensions.ScanSize.Size, 0.1) { + t.Errorf("Resulting ScanSize is %v, want %v.", gotPage.Dimensions.ScanSize.Size, wantPage.Dimensions.ScanSize.Size) + } + } + + }) + } +} diff --git a/document/jfif.go b/document/jfif.go new file mode 100644 index 0000000..d8a30e4 --- /dev/null +++ b/document/jfif.go @@ -0,0 +1,72 @@ +package document + +import ( + "Scanyonero/unit" + "encoding/binary" + "fmt" + "io" + + "neilpa.me/go-jfif" +) + +type jfifTagUnits byte + +const ( + jfifTagUnitsNoUnits jfifTagUnits = iota + jfifTagUnitsDotsPerInch + jfifTagUnitsDotsPerCM +) + +type jfifTag struct { + Version uint16 + Units jfifTagUnits + XDensity uint16 + YDensity uint16 + // Omit thumbnail width, height and pixel data. +} + +func (j *jfifTag) UnmarshalBinary(data []byte) error { + if len(data) < 7 { + return fmt.Errorf("JFIF tag length (%d) is smaller than expected (%d)", len(data), 7) + } + + j.Version = binary.BigEndian.Uint16(data[0:2]) + j.Units = jfifTagUnits(data[2]) + j.XDensity = binary.BigEndian.Uint16(data[3:5]) + j.YDensity = binary.BigEndian.Uint16(data[5:7]) + + return nil +} + +// Density returns the number of pixels per length unit. +func (j *jfifTag) Density() (x, y unit.Density) { + switch j.Units { + case jfifTagUnitsDotsPerInch: + return unit.PerInch(j.XDensity), unit.PerInch(j.YDensity) + case jfifTagUnitsDotsPerCM: + return unit.PerMillimeter(j.XDensity) / 10, unit.PerMillimeter(j.YDensity) / 10 + } + + return nil, nil +} + +func decodeJFIF(r io.Reader) (jfifTag, error) { + segments, err := jfif.DecodeSegments(r) + if err != nil { + return jfifTag{}, fmt.Errorf("failed to decode JPEG segments: %w", err) + } + + for _, segment := range segments { + sig, data, _ := segment.AppPayload() + switch sig { + case jfif.SigJFIF: + tag := jfifTag{} + if err := tag.UnmarshalBinary(data); err != nil { + return jfifTag{}, fmt.Errorf("failed to unmarshal tag data: %w", err) + } + return tag, nil + } + } + + return jfifTag{}, fmt.Errorf("couldn't find any JFIF tag") +} diff --git a/document/page.go b/document/page.go new file mode 100644 index 0000000..0ff4c62 --- /dev/null +++ b/document/page.go @@ -0,0 +1,12 @@ +package document + +import ( + "Scanyonero/unit" + "image" +) + +type Page struct { + Image image.Image `json:"-"` + + Dimensions unit.PageDimensions[unit.Millimeter] `json:"dimensions"` +} diff --git a/ftpserver/driver.go b/ftpserver/driver.go new file mode 100644 index 0000000..d7797cc --- /dev/null +++ b/ftpserver/driver.go @@ -0,0 +1,57 @@ +package ftpserver + +import ( + "crypto/tls" + "fmt" + + ftpserver "github.com/fclairamb/ftpserverlib" + "github.com/spf13/afero" +) + +type driver struct { + User, Pass string + ListenAddr string + + ClientDriver afero.Fs +} + +// GetSettings returns some general settings around the server setup +func (d *driver) GetSettings() (*ftpserver.Settings, error) { + //log.Printf("Passed settings to ftpserverlib.") + + return 
&ftpserver.Settings{ + ListenAddr: d.ListenAddr, + DefaultTransferType: ftpserver.TransferTypeBinary, + }, nil +} + +// ClientConnected is called to send the very first welcome message +func (d *driver) ClientConnected(cc ftpserver.ClientContext) (string, error) { + //log.Printf("Client %q connected.", cc.RemoteAddr()) + + return "Welcome to the Scanyonero FTP server.", nil +} + +// ClientDisconnected is called when the user disconnects, even if he never authenticated +func (d *driver) ClientDisconnected(cc ftpserver.ClientContext) { + //log.Printf("Client %q disconnected.", cc.RemoteAddr()) +} + +// AuthUser authenticates the user and selects an handling driver +func (d *driver) AuthUser(cc ftpserver.ClientContext, user, pass string) (ftpserver.ClientDriver, error) { + if d.User != "" && d.Pass != "" { + if user != d.User || pass != d.Pass { + return nil, fmt.Errorf("wrong username or password") + } + } + + //log.Printf("Client %q authenticated with %q and %q.", cc.RemoteAddr(), user, pass) + + return d.ClientDriver, nil +} + +// GetTLSConfig returns a TLS Certificate to use +// The certificate could frequently change if we use something like "let's encrypt" +func (d *driver) GetTLSConfig() (*tls.Config, error) { + return nil, fmt.Errorf("tls not implemented") +} diff --git a/ftpserver/driver_test.go b/ftpserver/driver_test.go new file mode 100644 index 0000000..0978d2b --- /dev/null +++ b/ftpserver/driver_test.go @@ -0,0 +1,20 @@ +package ftpserver_test + +import ( + "Scanyonero/ftpserver" + "log" + "testing" +) + +func TestDriver(t *testing.T) { + t.SkipNow() + + server, err := ftpserver.NewFTPServer("", "", "127.0.0.1:21") + if err != nil { + log.Panicf("Failed to create FTP-Server: %v.", err) + } + + for file := range server.FileChan() { + log.Printf("Received file %v with %d bytes.", file.Name, len(file.Data)) + } +} diff --git a/ftpserver/ftp-server.go b/ftpserver/ftp-server.go new file mode 100644 index 0000000..e8fc593 --- /dev/null +++ b/ftpserver/ftp-server.go @@ -0,0 +1,80 @@ +package ftpserver + +import ( + "Scanyonero/document" + "fmt" + "sync" + + ftpserverlib "github.com/fclairamb/ftpserverlib" +) + +type FTPServer struct { + files chan document.File + + ftpServer *ftpserverlib.FtpServer +} + +// NewFTPServer returns a new FTP server instance. +// This will also start the server. +func NewFTPServer(user, pass, listenAddr string) (*FTPServer, error) { + fs := &virtualFS{} + + driver := &driver{ + User: user, + Pass: pass, + ListenAddr: listenAddr, + ClientDriver: fs, + } + + s := &FTPServer{ + files: make(chan document.File), + ftpServer: ftpserverlib.NewFtpServer(driver), + } + + // Handler callback for newly uploaded files. + // We will pass the files into the files channel. + var closed bool + var mutex sync.Mutex + fs.Callback = func(file document.File) error { + mutex.Lock() + defer mutex.Unlock() + if closed { + return fmt.Errorf("server is closing") + } + + s.files <- file + return nil + } + + // Create listener. + if err := s.ftpServer.Listen(); err != nil { + return nil, err + } + + // Start listening. + go func() { + s.ftpServer.Serve() + + // We will be shutting down everything soon. + // Ensure that no new files will be written into the files channel. + mutex.Lock() + closed = true + mutex.Unlock() + + close(s.files) + }() + + return s, nil +} + +// Returns the file channel where files uploaded to the FTP-Server will be sent to. +// +// The channel will automatically be closed after Stop() has been called. 
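+//
+// Typical usage is to range over the channel until it is closed (see the ingest loop in main.go):
+//
+//	for file := range server.FileChan() {
+//		// handle file.Name and file.Data
+//	}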
+func (s *FTPServer) FileChan() <-chan document.File { + return s.files +} + +// Shuts the FTP-Server down. +func (s *FTPServer) Stop() error { + return s.ftpServer.Stop() +} diff --git a/ftpserver/utils.go b/ftpserver/utils.go new file mode 100644 index 0000000..736c7b7 --- /dev/null +++ b/ftpserver/utils.go @@ -0,0 +1,16 @@ +package ftpserver + +import "path/filepath" + +func normalizePath(path string) string { + path = filepath.Clean(path) + + switch path { + case ".": + return string(filePathSeparator) + case "..": + return string(filePathSeparator) + default: + return path + } +} diff --git a/ftpserver/virtual-fs.go b/ftpserver/virtual-fs.go new file mode 100644 index 0000000..3d89d88 --- /dev/null +++ b/ftpserver/virtual-fs.go @@ -0,0 +1,146 @@ +package ftpserver + +import ( + "Scanyonero/document" + "io" + "os" + "time" + + "github.com/spf13/afero" + "github.com/spf13/afero/mem" +) + +const filePathSeparator = os.PathSeparator + +type virtualFile struct { + mem.File + + Callback func(file document.File) error +} + +// Override close behavior. +// When the file handle is closed, we will forward the file content via callback. +func (v *virtualFile) Close() error { + if v.Callback != nil { + file := document.File{ + Name: v.Name(), + } + + var err error + if file.Data, err = io.ReadAll(mem.NewReadOnlyFileHandle(v.Data())); err != nil { + v.File.Close() + return err + } + + if err := v.Callback(file); err != nil { + v.File.Close() + return err + } + } + + return v.File.Close() +} + +var _ afero.File = &virtualFile{} + +type virtualFS struct { + Callback func(file document.File) error +} + +var _ afero.Fs = &virtualFS{} + +// Create creates a file in the filesystem, returning the file and an error, if any happens. +func (v *virtualFS) Create(name string) (afero.File, error) { + name = normalizePath(name) + //log.Printf("VirtualFS: Create: %v", name) + file := mem.CreateFile(name) + fileHandle := *mem.NewFileHandle(file) + return &virtualFile{File: fileHandle, Callback: v.Callback}, nil +} + +// Mkdir creates a directory in the filesystem, return an error if any happens. +func (v *virtualFS) Mkdir(name string, perm os.FileMode) error { + //name = normalizePath(name) + //log.Printf("VirtualFS: Mkdir: %v, %v", name, perm) + return nil +} + +// MkdirAll creates a directory path and all parents that does not exist yet. +func (v *virtualFS) MkdirAll(path string, perm os.FileMode) error { + //path = normalizePath(path) + //log.Printf("VirtualFS: MkdirAll: %v, %v", path, perm) + return nil +} + +// Open opens a file, returning it or an error, if any happens. +func (v *virtualFS) Open(name string) (afero.File, error) { + name = normalizePath(name) + //log.Printf("VirtualFS: Open: %v", name) + dir := mem.CreateDir(name) + mem.SetMode(dir, os.ModeDir|0o755) + return mem.NewReadOnlyFileHandle(dir), nil +} + +// OpenFile opens a file using the given flags and the given mode. +func (v *virtualFS) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) { + name = normalizePath(name) + //log.Printf("VirtualFS: OpenFile: %v, %v, %v", name, flag, perm) + return v.Create(name) +} + +// Remove removes a file identified by name, returning an error, if any happens. +func (v *virtualFS) Remove(name string) error { + name = normalizePath(name) + //log.Printf("VirtualFS: Remove: %v", name) + return &os.PathError{Op: "remove", Path: name, Err: os.ErrNotExist} +} + +// RemoveAll removes a directory path and any children it contains. It does not fail if the path does not exist (return nil). 
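+//
+// In this virtual filesystem the call is a no-op: uploads are forwarded through the Callback when a file handle is closed and nothing is kept on disk.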
+func (v *virtualFS) RemoveAll(path string) error { + //path = normalizePath(path) + //log.Printf("VirtualFS: RemoveAll: %v", path) + return nil +} + +// Rename renames a file. +func (v *virtualFS) Rename(oldName, newName string) error { + oldName = normalizePath(oldName) + //newName = normalizePath(newName) + //log.Printf("VirtualFS: Rename: %v -> %v", oldName, newName) + return &os.PathError{Op: "rename", Path: oldName, Err: os.ErrNotExist} +} + +// Stat returns a FileInfo describing the named file, or an error, if any happens. +func (v *virtualFS) Stat(name string) (os.FileInfo, error) { + name = normalizePath(name) + //log.Printf("VirtualFS: Stat: %v", name) + dir := mem.CreateDir(name) + mem.SetMode(dir, os.ModeDir|0o755) + return mem.GetFileInfo(dir), nil +} + +// The name of this FileSystem. +func (v *virtualFS) Name() string { + return "ScanyoneroVirtualFS" +} + +// Chmod changes the mode of the named file to mode. +func (v *virtualFS) Chmod(name string, mode os.FileMode) error { + name = normalizePath(name) + //log.Printf("VirtualFS: Chmod: %v, %v", name, mode) + return &os.PathError{Op: "chmod", Path: name, Err: os.ErrNotExist} +} + +// Chown changes the uid and gid of the named file. +func (v *virtualFS) Chown(name string, uid, gid int) error { + name = normalizePath(name) + //log.Printf("VirtualFS: Chown: %v, %v, %v", name, uid, gid) + return &os.PathError{Op: "chown", Path: name, Err: os.ErrNotExist} +} + +// Chtimes changes the access and modification times of the named file. +func (v *virtualFS) Chtimes(name string, atime time.Time, mtime time.Time) error { + name = normalizePath(name) + //log.Printf("VirtualFS: Chtimes: %v, %v, %v", name, atime, mtime) + return &os.PathError{Op: "chtimes", Path: name, Err: os.ErrNotExist} +} diff --git a/go.mod b/go.mod index 8a1c427..6e41f24 100644 --- a/go.mod +++ b/go.mod @@ -1,3 +1,31 @@ -module ocrmypdf-runner +module Scanyonero -go 1.19 \ No newline at end of file +go 1.23.0 + +toolchain go1.24.2 + +require ( + github.com/chai2010/tiff v0.0.0-20211005095045-4ec2aa243943 + github.com/fclairamb/ftpserverlib v0.25.0 + github.com/gorilla/websocket v1.5.3 + github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 + github.com/pdfcpu/pdfcpu v0.10.2 + github.com/spf13/afero v1.14.0 + neilpa.me/go-jfif v0.5.0 +) + +require ( + github.com/fclairamb/go-log v0.5.0 // indirect + github.com/hhrutter/lzw v1.0.0 // indirect + github.com/hhrutter/pkcs7 v0.2.0 // indirect + github.com/hhrutter/tiff v1.0.2 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + golang.org/x/crypto v0.37.0 // indirect + golang.org/x/image v0.26.0 // indirect + golang.org/x/sys v0.32.0 // indirect + golang.org/x/text v0.24.0 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + neilpa.me/go-x v0.2.0 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..ff38b6f --- /dev/null +++ b/go.sum @@ -0,0 +1,57 @@ +github.com/chai2010/tiff v0.0.0-20211005095045-4ec2aa243943 h1:CjuhVIUiyWQZVY4rmcvm/9R+60e/Wi6LkXyHU38MqXI= +github.com/chai2010/tiff v0.0.0-20211005095045-4ec2aa243943/go.mod h1:FhMMqekobM33oGdTfbi65oQ9P7bnQ5/0EDfmleW35RE= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/fclairamb/ftpserverlib v0.25.0 h1:swV2CK+WiN9KEkqkwNgGbSIfRoYDWNno41hoVtYwgfA= +github.com/fclairamb/ftpserverlib v0.25.0/go.mod 
h1:LIDqyiFPhjE9IuzTkntST8Sn8TaU6NRgzSvbMpdfRC4= +github.com/fclairamb/go-log v0.5.0 h1:Gz9wSamEaA6lta4IU2cjJc2xSq5sV5VYSB5w/SUHhVc= +github.com/fclairamb/go-log v0.5.0/go.mod h1:XoRO1dYezpsGmLLkZE9I+sHqpqY65p8JA+Vqblb7k40= +github.com/go-kit/log v0.2.1 h1:MRVx0/zhvdseW+Gza6N9rVzU/IVzaeE1SFI4raAhmBU= +github.com/go-kit/log v0.2.1/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= +github.com/go-logfmt/logfmt v0.5.1 h1:otpy5pqBCBZ1ng9RQ0dPu4PN7ba75Y/aA+UpowDyNVA= +github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= +github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= +github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/hhrutter/lzw v1.0.0 h1:laL89Llp86W3rRs83LvKbwYRx6INE8gDn0XNb1oXtm0= +github.com/hhrutter/lzw v1.0.0/go.mod h1:2HC6DJSn/n6iAZfgM3Pg+cP1KxeWc3ezG8bBqW5+WEo= +github.com/hhrutter/pkcs7 v0.2.0 h1:i4HN2XMbGQpZRnKBLsUwO3dSckzgX142TNqY/KfXg+I= +github.com/hhrutter/pkcs7 v0.2.0/go.mod h1:aEzKz0+ZAlz7YaEMY47jDHL14hVWD6iXt0AgqgAvWgE= +github.com/hhrutter/tiff v1.0.2 h1:7H3FQQpKu/i5WaSChoD1nnJbGx4MxU5TlNqqpxw55z8= +github.com/hhrutter/tiff v1.0.2/go.mod h1:pcOeuK5loFUE7Y/WnzGw20YxUdnqjY1P0Jlcieb/cCw= +github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= +github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ= +github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8= +github.com/pdfcpu/pdfcpu v0.10.2 h1:DB2dWuoq0eF0QwHjgyLirYKLTCzFOoZdmmIUSu72aL0= +github.com/pdfcpu/pdfcpu v0.10.2/go.mod h1:Q2Z3sqdRqHTdIq1mPAUl8nfAoim8p3c1ASOaQ10mCpE= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/secsy/goftp v0.0.0-20200609142545-aa2de14babf4 h1:PT+ElG/UUFMfqy5HrxJxNzj3QBOf7dZwupeVC+mG1Lo= +github.com/secsy/goftp v0.0.0-20200609142545-aa2de14babf4/go.mod h1:MnkX001NG75g3p8bhFycnyIjeQoOjGL6CEIsdE/nKSY= +github.com/spf13/afero v1.14.0 h1:9tH6MapGnn/j0eb0yIXiLjERO8RB6xIVZRDCX7PtqWA= +github.com/spf13/afero v1.14.0/go.mod h1:acJQ8t0ohCGuMN3O+Pv0V0hgMxNYDlvdk+VTfyZmbYo= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= +golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= +golang.org/x/image v0.26.0 h1:4XjIFEZWQmCZi6Wv8BoxsDhRU3RVnLX04dToTDAEPlY= +golang.org/x/image v0.26.0/go.mod h1:lcxbMFAovzpnJxzXS3nyL83K27tmqtKzIJpctK8YO5c= +golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= +golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= +golang.org/x/text 
v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +neilpa.me/go-jfif v0.5.0 h1:MNx8t0K4ysVKywCb5O98FhN3CcYih7rAUpCehj5HgtQ= +neilpa.me/go-jfif v0.5.0/go.mod h1:EbYXM1sxsAfbRgty+I5j1Ommm6WTsLKT+KXNgC998d0= +neilpa.me/go-x v0.2.0 h1:GbLRDtAZ9MgVrwrIe3jWnHF2W40LCFA9Ng/aDbd9GVs= +neilpa.me/go-x v0.2.0/go.mod h1:aIemU+pQYLLV3dygXotHKF7SantXe5HzZR6VIjzY/4g= diff --git a/init.go b/init.go index 6017508..0c527df 100644 --- a/init.go +++ b/init.go @@ -1,6 +1,6 @@ package main -import ( +/*import ( "path/filepath" "time" ) @@ -12,4 +12,4 @@ func init() { Interval: 5 * time.Second, } runner.Run() -} +}*/ diff --git a/jsconfig.json b/jsconfig.json new file mode 100644 index 0000000..3c541ae --- /dev/null +++ b/jsconfig.json @@ -0,0 +1,7 @@ +{ + "compilerOptions": { + "alwaysStrict": true, + "checkJs": true, + "target": "ES6", + }, +} \ No newline at end of file diff --git a/main.go b/main.go index 737674f..e824dbb 100644 --- a/main.go +++ b/main.go @@ -1,12 +1,83 @@ package main -import "log" +import ( + "Scanyonero/document" + "Scanyonero/ftpserver" + "Scanyonero/unit" + "log" + "net/http" + "path/filepath" + "regexp" +) func main() { - log.Printf("Starting OCRmyPDF-runner.") + log.Printf("Starting Scanyonero.") - // The runners will be started in some init.go file. + // Start FTP server. + ftpServer, err := ftpserver.NewFTPServer("", "", "127.0.0.1:21") + if err != nil { + log.Panicf("Failed to start FTP server: %v.", err) + } + defer ftpServer.Stop() - // Wait forever. - select {} + // Web and WS server. + server := NewServer() + + // Add test documents. + server.Documents.Append(LoadExampleQueueEntries()...) + server.Documents.Append(LoadExampleQueueEntries()...) + server.Documents.Append(LoadExampleQueueEntries()...) + server.Documents.Append(LoadExampleQueueEntries()...) + + go func() { + ingestor := document.Ingestor{ + DefaultDPI: unit.PerInch(150), + Rules: []document.IngestorRule{{ + Match: struct { + Name *regexp.Regexp + XPixels *int + YPixels *int + }{ + Name: regexp.MustCompile(`^.*\.pdf$`), + }, + Action: struct { + MediumWidth *unit.Millimeter + MediumHeight *unit.Millimeter + ScanOffsetX *unit.Millimeter + ScanOffsetY *unit.Millimeter + }{ + MediumWidth: &([]unit.Millimeter{100}[0]), + }, + }}, + } + + for file := range ftpServer.FileChan() { + docPages, err := ingestor.Ingest(file) + if err != nil { + log.Printf("Failed to ingest document file %q: %v.", file.Name, err) + continue + } + + var entries []QueueEntry + for _, page := range docPages { + entries = append(entries, QueueEntry{ + ID: NewQueueEntryID(), + Name: filepath.Base(file.Name), + QueueEntryData: QueueEntryDataPage{Page: &page}, + }) + } + entries = append(entries, QueueEntry{ID: NewQueueEntryID(), QueueEntryData: QueueEntryDataSeparator{}}) + + server.Documents.Lock() + server.Documents.Append(entries...) 
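+			// The append is broadcast to all connected websocket clients as a QueueInsertAt packet (see Queue.InsertAt).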
+ server.Documents.Unlock() + } + }() + + http.Handle("/", server) + + log.Println("Server is running on port 8080.") + if err := http.ListenAndServe(":8080", nil); err != nil { + log.Panicf("ListenAndServe failed: %v.", err) + } } diff --git a/ocrmypdf/cli-options.go b/ocrmypdf/cli-options.go new file mode 100644 index 0000000..caf451d --- /dev/null +++ b/ocrmypdf/cli-options.go @@ -0,0 +1,13 @@ +package ocrmypdf + +type CLIOptions struct { + Custom []string // Custom command line options go here. +} + +// Args returns a list of CLI arguments that should be passed to the executable. +func (c *CLIOptions) Args() (result []string) { + + result = append(result, c.Custom...) + + return +} diff --git a/ocrmypdf/cli.go b/ocrmypdf/cli.go new file mode 100644 index 0000000..8eeb146 --- /dev/null +++ b/ocrmypdf/cli.go @@ -0,0 +1,88 @@ +package ocrmypdf + +import ( + "bytes" + "fmt" + "io" + "os/exec" + "strings" +) + +type CLI struct { + // The OCRmyPDF executable path can be overridden here. Otherwise the default path will be used. + // Special cases: + // - "py -m ocrmypdf": This will run "py -m ocrmypdf". + ExecutablePath string +} + +// executableAndArgs returns the executable name and its base arguments. +func (c CLI) executableAndArgs() (string, []string) { + // Get path of executable. + execPath := ExecutablePath + if c.ExecutablePath != "" { + execPath = c.ExecutablePath + } + + // Special cases. + var args []string + switch execPath { + case "py -m ocrmypdf": + execPath, args = "py", []string{"-m", "ocrmypdf"} + } + + return execPath, args +} + +// VersionString returns the version string as returned by OCRmyPDF. +func (c CLI) VersionString() (string, error) { + execPath, args := c.executableAndArgs() + + args = append(args, "--version") + + cmd := exec.Command(execPath, args...) + + var output, errBuffer bytes.Buffer + cmd.Stdout = &output + cmd.Stderr = &errBuffer + + if err := cmd.Run(); err != nil { + switch err := err.(type) { + case *exec.ExitError: + return "", fmt.Errorf("OCRmyPDF stopped with exit code %v: %v", err.ExitCode(), errBuffer.String()) + default: + return "", err + } + } + + return strings.TrimRight(output.String(), "\n\r"), nil +} + +// Run takes a document from input, and writes the resulting document into output. +// The options parameter is optional. +func (c CLI) Run(input io.Reader, output io.Writer, options *CLIOptions) error { + execPath, args := c.executableAndArgs() + + if options != nil { + args = append(args, options.Args()...) + } + + args = append(args, "-", "-") + + cmd := exec.Command(execPath, args...) 
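+	// The two "-" arguments appended above tell OCRmyPDF to read the input PDF from stdin and write the result to stdout,
+	// so the document is streamed through the pipes wired up below without temporary files.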
+ cmd.Stdin = input + cmd.Stdout = output + + errBuffer := bytes.Buffer{} + cmd.Stderr = &errBuffer + + if err := cmd.Run(); err != nil { + switch err := err.(type) { + case *exec.ExitError: + return fmt.Errorf("OCRmyPDF stopped with exit code %v: %v", err.ExitCode(), errBuffer.String()) + default: + return err + } + } + + return nil +} diff --git a/ocrmypdf/cli_test.go b/ocrmypdf/cli_test.go new file mode 100644 index 0000000..121c921 --- /dev/null +++ b/ocrmypdf/cli_test.go @@ -0,0 +1,40 @@ +package ocrmypdf_test + +import ( + "Scanyonero/ocrmypdf" + "os" + "path/filepath" + "testing" +) + +func TestCLI_VersionString(t *testing.T) { + cli := ocrmypdf.CLI{} + + v, err := cli.VersionString() + if err != nil { + t.Fatalf("VersionString() returned error: %v", err) + } + if v == "" { + t.Errorf("Returned version string is empty: %v", v) + } +} + +func TestCLI_Run(t *testing.T) { + t.SkipNow() + + cli := ocrmypdf.CLI{} + + source, err := os.Open(filepath.Join(".", "..", "test-documents", "typst-example", "600 DPI Flatbed.pdf")) + if err != nil { + t.Fatalf("Couldn't open file: %v.", err) + } + + dest, err := os.Create(filepath.Join(".", "Output.pdf")) + if err != nil { + t.Fatalf("Couldn't create file: %v.", err) + } + + if err := cli.Run(source, dest, nil); err != nil { + t.Fatalf("Run() returned error: %v", err) + } +} diff --git a/ocrmypdf/cli_unix.go b/ocrmypdf/cli_unix.go new file mode 100644 index 0000000..263930d --- /dev/null +++ b/ocrmypdf/cli_unix.go @@ -0,0 +1,6 @@ +//go:build unix + +package ocrmypdf + +// The path to the OCRmyPDF executable. +var ExecutablePath = "ocrmypdf" diff --git a/ocrmypdf/cli_windows.go b/ocrmypdf/cli_windows.go new file mode 100644 index 0000000..7d8f248 --- /dev/null +++ b/ocrmypdf/cli_windows.go @@ -0,0 +1,6 @@ +//go:build windows + +package ocrmypdf + +// The path to the OCRmyPDF executable. +var ExecutablePath = "py -m ocrmypdf" diff --git a/queue-entry-data-page.go b/queue-entry-data-page.go new file mode 100644 index 0000000..55c24a0 --- /dev/null +++ b/queue-entry-data-page.go @@ -0,0 +1,11 @@ +package main + +import "Scanyonero/document" + +type QueueEntryDataPage struct { + Page *document.Page `json:"page"` +} + +func (q QueueEntryDataPage) QueueEntryDataType() string { + return "Page" +} diff --git a/queue-entry-data-separator.go b/queue-entry-data-separator.go new file mode 100644 index 0000000..32eec56 --- /dev/null +++ b/queue-entry-data-separator.go @@ -0,0 +1,7 @@ +package main + +type QueueEntryDataSeparator struct{} + +func (q QueueEntryDataSeparator) QueueEntryDataType() string { + return "Separator" +} diff --git a/queue-entry-data.go b/queue-entry-data.go new file mode 100644 index 0000000..25ad020 --- /dev/null +++ b/queue-entry-data.go @@ -0,0 +1,5 @@ +package main + +type QueueEntryData interface { + QueueEntryDataType() string +} diff --git a/queue-entry.go b/queue-entry.go new file mode 100644 index 0000000..19a02b2 --- /dev/null +++ b/queue-entry.go @@ -0,0 +1,41 @@ +package main + +import ( + "encoding/json" + "fmt" + "sync/atomic" +) + +var queueEntryIDCounter atomic.Uint32 + +// NewQueueEntryID returns a unique document id. +func NewQueueEntryID() QueueEntryID { + return QueueEntryID(queueEntryIDCounter.Add(1)) +} + +type QueueEntryID int + +// QueueEntry can contain a single or multiple scanned pages. 
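+// The embedded QueueEntryData is either a scanned page (QueueEntryDataPage) or a separator
+// (QueueEntryDataSeparator) that marks the boundary between two documents in the queue.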
+type QueueEntry struct { + ID QueueEntryID `json:"id"` + Name string `json:"name"` + + QueueEntryData +} + +func (q QueueEntry) MarshalJSON() ([]byte, error) { + if q.QueueEntryData == nil { + return nil, fmt.Errorf("queue entry doesn't contain any data") + } + + type embedded QueueEntry // Prevent recursion. + return json.Marshal( + struct { + embedded + Type string `json:"type"` + }{ + embedded: embedded(q), + Type: q.QueueEntryDataType(), + }, + ) +} diff --git a/queue.go b/queue.go new file mode 100644 index 0000000..0569f68 --- /dev/null +++ b/queue.go @@ -0,0 +1,186 @@ +package main + +import ( + "slices" + "sync" +) + +// Queue contains a list of scanned documents. +// A user can issue operations on these entries. +// +// The list is synced between the server and clients via websockets. +type Queue struct { + sync.Mutex + + Documents []QueueEntry + + listeners map[chan<- ServerWebsocketPacket]struct{} +} + +// RegisterListener will add the given channel c to receive updates in the form of websocket packets. +// +// UnregisterListener must be called before the channel can be closed or stopped reading from. +func (d *Queue) RegisterListener(c chan<- ServerWebsocketPacket) { + d.Lock() + defer d.Unlock() + + if d.listeners == nil { + d.listeners = make(map[chan<- ServerWebsocketPacket]struct{}) + } + + d.listeners[c] = struct{}{} + + // Send current document queue. + c <- &ServerWebsocketPacketQueueReplace{Documents: d.Documents} +} + +// UnregisterListener will stop the given listener from receiving updates. +// Upon return, the listener will not receive any updates from the queue. +func (d *Queue) UnregisterListener(c chan<- ServerWebsocketPacket) { + d.Lock() + defer d.Unlock() + + if d.listeners == nil { + d.listeners = make(map[chan<- ServerWebsocketPacket]struct{}) + } + + delete(d.listeners, c) +} + +// Broadcast will send a websocket packet to all registered listeners. +// +// The Queue must be locked when calling this. +func (d *Queue) Broadcast(p ...ServerWebsocketPacket) { + for listener := range d.listeners { + for _, packet := range p { + listener <- packet + } + } +} + +// DeleteAt removes all elements at [i:j]. +// +// This will automatically limit the indices to valid ranges. +// +// The Queue must be locked when calling this. +func (d *Queue) DeleteAt(i, j int) { + i = max(0, min(i, len(d.Documents)-1)) // Limit to [0; len). + j = max(0, min(j, len(d.Documents))) // Limit to [0; len]. + if i >= j { + return + } + + d.Documents = slices.Delete(d.Documents, i, j) + + d.Broadcast(&ServerWebsocketPacketQueueDeleteAt{IndexA: i, IndexB: j}) +} + +// Delete removes the elements with the given DocumentIDs. +// +// The Queue must be locked when calling this. +func (d *Queue) Delete(ids ...QueueEntryID) { + for i, doc := range slices.Backward(d.Documents) { + if slices.Contains(ids, doc.ID) { + d.DeleteAt(i, i+1) + } + } +} + +// InsertAt inserts the given document at index i. +// +// Documents will be shifted accordingly, valid indices are in the range of [0; len(queue)]. +// This will automatically limit the index to valid ranges. +// +// The Queue must be locked when calling this. +func (d *Queue) InsertAt(i int, documents ...QueueEntry) { + i = max(0, min(i, len(d.Documents))) // Limit to [0; len]. + + d.Documents = slices.Insert(d.Documents, i, documents...) + + d.Broadcast(&ServerWebsocketPacketQueueInsertAt{Index: i, Documents: documents}) +} + +// Append will add the given documents to the end of the queue. +// +// The Queue must be locked when calling this. 
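+//
+// A minimal usage sketch (cf. main.go):
+//
+//	queue.Lock()
+//	queue.Append(entries...)
+//	queue.Unlock()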
+func (d *Queue) Append(documents ...QueueEntry) { + d.InsertAt(len(d.Documents), documents...) +} + +// Replace will replace the whole list of documents with the given one. +// +// The Queue must be locked when calling this. +func (d *Queue) Replace(documents ...QueueEntry) { + d.Documents = slices.Clone(documents) + + d.Broadcast(&ServerWebsocketPacketQueueReplace{Documents: documents}) +} + +// ShiftAt will move the element at index i by the given offset. +// +// This will automatically limit the index and the offset to valid ranges. +// +// The Queue must be locked when calling this. +func (d *Queue) ShiftAt(i, offset int) { + if len(d.Documents) <= 0 { + return + } + i = max(0, min(i, len(d.Documents)-1)) // Limit to [0; len). + offset = max(-i, min(offset, len(d.Documents)-i-1)) // Limit to [-i; len-i-1]. + + for tempOffset, i := offset, i; tempOffset != 0; { + switch { + case tempOffset > 0: + d.Documents[i], d.Documents[i+1] = d.Documents[i+1], d.Documents[i] + tempOffset-- + i++ + case offset < 0: + d.Documents[i], d.Documents[i-1] = d.Documents[i-1], d.Documents[i] + tempOffset++ + i-- + } + } + + d.Broadcast(&ServerWebsocketPacketQueueShiftAt{Index: i, Offset: offset}) +} + +// Shift will move the index of all elements with the given DocumentIDs by offset. +// +// The Queue must be locked when calling this. +func (d *Queue) Shift(offset int, ids ...QueueEntryID) { + switch { + case offset < 0: + for i, entry := range d.Documents { + if slices.Contains(ids, entry.ID) { + if offset < -i { + offset = -i + } + d.ShiftAt(i, offset) + } + } + + case offset > 0: + for i, entry := range slices.Backward(d.Documents) { + if slices.Contains(ids, entry.ID) { + if offset > len(d.Documents)-i-1 { + offset = len(d.Documents) - i - 1 + } + d.ShiftAt(i, offset) + } + } + } +} + +// QueueEntryByID returns the QueueEntry with the given ID. +// +// The Queue must be locked when calling this. +func (d *Queue) QueueEntryByID(id QueueEntryID) *QueueEntry { + for i := range d.Documents { + document := &d.Documents[i] + if document.ID == id { + return document + } + } + + return nil +} diff --git a/server-websocket-packet.go b/server-websocket-packet.go new file mode 100644 index 0000000..6bddccb --- /dev/null +++ b/server-websocket-packet.go @@ -0,0 +1,12 @@ +package main + +// ServerWebsocketPacket represents a websocket packet. +type ServerWebsocketPacket interface { + Type() string +} + +var serverWebsocketPacketRegistry map[string]ServerWebsocketPacket = map[string]ServerWebsocketPacket{} + +func ServerWebsocketPacketRegister(prototype ServerWebsocketPacket) { + serverWebsocketPacketRegistry[prototype.Type()] = prototype +} diff --git a/server-websocket-packets.go b/server-websocket-packets.go new file mode 100644 index 0000000..7363436 --- /dev/null +++ b/server-websocket-packets.go @@ -0,0 +1,70 @@ +package main + +// ServerWebsocketPacketQueueDeleteAt represents a delete operation on a document queue list. +// The range of the deleted indices is [IndexA; IndexB). +type ServerWebsocketPacketQueueDeleteAt struct { + IndexA int `json:"indexA"` // Starting index of the deletion range. + IndexB int `json:"indexB"` // End index of the deletion range. This index is not included in the range. +} + +func (s *ServerWebsocketPacketQueueDeleteAt) Type() string { return "QueueDeleteAt" } + +func init() { ServerWebsocketPacketRegister(&ServerWebsocketPacketQueueDeleteAt{}) } + +// ServerWebsocketPacketQueueDelete represents a delete operation on a document queue list. 
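+// Clients send it to request removal of the listed entries by ID; the server applies the change and
+// broadcasts the resulting QueueDeleteAt packets to all listeners (see Queue.Delete).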
+type ServerWebsocketPacketQueueDelete struct { + IDs []QueueEntryID `json:"ids"` // IDs of the documents. +} + +func (s *ServerWebsocketPacketQueueDelete) Type() string { return "QueueDelete" } + +func init() { ServerWebsocketPacketRegister(&ServerWebsocketPacketQueueDelete{}) } + +// ServerWebsocketPacketQueueInsertAt represents an insert operation on a document queue list. +type ServerWebsocketPacketQueueInsertAt struct { + Index int `json:"index"` + Documents []QueueEntry `json:"documents"` +} + +func (s *ServerWebsocketPacketQueueInsertAt) Type() string { return "QueueInsertAt" } + +func init() { ServerWebsocketPacketRegister(&ServerWebsocketPacketQueueInsertAt{}) } + +// ServerWebsocketPacketQueueReplace represents a replace operation on a document queue list. +type ServerWebsocketPacketQueueReplace struct { + Documents []QueueEntry `json:"documents"` +} + +func (s *ServerWebsocketPacketQueueReplace) Type() string { return "QueueReplace" } + +func init() { ServerWebsocketPacketRegister(&ServerWebsocketPacketQueueReplace{}) } + +// ServerWebsocketPacketQueueShiftAt represents a shift operation on a document queue list. +type ServerWebsocketPacketQueueShiftAt struct { + Index int `json:"index"` // Index of the to be shifted element. + Offset int `json:"offset"` // Shift offset. +} + +func (s *ServerWebsocketPacketQueueShiftAt) Type() string { return "QueueShiftAt" } + +func init() { ServerWebsocketPacketRegister(&ServerWebsocketPacketQueueShiftAt{}) } + +// ServerWebsocketPacketQueueShift represents a shift operation on a document queue list. +type ServerWebsocketPacketQueueShift struct { + IDs []QueueEntryID `json:"ids"` // IDs of the documents. + Offset int `json:"offset"` // Shift offset. +} + +func (s *ServerWebsocketPacketQueueShift) Type() string { return "QueueShift" } + +func init() { ServerWebsocketPacketRegister(&ServerWebsocketPacketQueueShift{}) } + +// ServerWebsocketPacketQueueUpdate represents an update operation of documents in a queue list. +// The receiver should update any of the received documents in their local queue list. +type ServerWebsocketPacketQueueUpdate struct { + Documents []QueueEntry `json:"documents"` +} + +func (s *ServerWebsocketPacketQueueUpdate) Type() string { return "QueueUpdate" } + +func init() { ServerWebsocketPacketRegister(&ServerWebsocketPacketQueueUpdate{}) } diff --git a/server-websocket.go b/server-websocket.go new file mode 100644 index 0000000..594858c --- /dev/null +++ b/server-websocket.go @@ -0,0 +1,101 @@ +package main + +import ( + "encoding/json" + "log" + "net/http" + + "github.com/gorilla/websocket" +) + +var upgrader = websocket.Upgrader{} + +func (s *Server) handleWebSocket(w http.ResponseWriter, r *http.Request) { + conn, err := upgrader.Upgrade(w, r, nil) + if err != nil { + log.Printf("Upgrading connection to WS failed: %v.", err) + return + } + defer conn.Close() + + sendChan := make(chan ServerWebsocketPacket) + defer close(sendChan) + + // Goroutine for sending packets. + go func(sendChan <-chan ServerWebsocketPacket) { + for packet := range sendChan { + message := struct { + Type string `json:"type"` + Payload any `json:"payload"` + }{ + Type: packet.Type(), + Payload: packet, + } + + messageData, err := json.Marshal(message) + if err != nil { + log.Printf("Failed to marshal websocket packet: %v.", err) + continue + } + + conn.WriteMessage(websocket.TextMessage, messageData) + } + }(sendChan) + + // Register listener on document queue updates. 
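+	// Registering immediately pushes the current queue to the client as a QueueReplace packet, followed by incremental updates.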
+ s.Documents.RegisterListener(sendChan) + defer s.Documents.UnregisterListener(sendChan) + + // Main loop that receives packets. + for { + messageType, data, err := conn.ReadMessage() + if err != nil { + log.Printf("Reading WS message failed: %v.", err) + break + } + + switch messageType { + case websocket.CloseMessage: + log.Printf("Connection %v closed.", conn.LocalAddr()) + return + + case websocket.TextMessage: + //log.Printf("Message from %v: %s.", conn.LocalAddr(), data) + + var message struct { + Type string `json:"type"` + Payload json.RawMessage `json:"payload"` + } + + if err := json.Unmarshal(data, &message); err != nil { + log.Printf("Failed to marshal websocket packet from client %v: %v.", conn.LocalAddr(), err) + return + } + + prototype, ok := serverWebsocketPacketRegistry[message.Type] + if !ok { + log.Printf("Unknown websocket packet type %q from client %v.", message.Type, conn.LocalAddr()) + return + } + + if err := json.Unmarshal(message.Payload, prototype); err != nil { + log.Printf("Failed to marshal websocket packet payload from client %v: %v.", conn.LocalAddr(), err) + return + } + + switch packet := prototype.(type) { + case *ServerWebsocketPacketQueueDelete: + s.Documents.Lock() + s.Documents.Delete(packet.IDs...) + s.Documents.Unlock() + case *ServerWebsocketPacketQueueShift: + s.Documents.Lock() + s.Documents.Shift(packet.Offset, packet.IDs...) + s.Documents.Unlock() + default: + log.Printf("Websocket client %q sent unsupported packet type %T.", conn.LocalAddr(), prototype) + return + } + } + } +} diff --git a/server.go b/server.go new file mode 100644 index 0000000..666522b --- /dev/null +++ b/server.go @@ -0,0 +1,151 @@ +package main + +import ( + "fmt" + "image/jpeg" + "log" + "net/http" + "strconv" + + "github.com/nfnt/resize" +) + +type Server struct { + http.ServeMux + + Documents Queue +} + +func NewServer() *Server { + s := &Server{} + + s.Handle("/", http.FileServer(http.Dir("./static"))) + s.HandleFunc("GET /api/queue-entry-page/{id}/image", s.handleGetQueueEntryImage) + s.HandleFunc("GET /api/queue-entry-page/{id}/preview", s.handleGetQueueEntryPreview) + //s.HandleFunc("PUT /documents/{id}", addItem) + //s.HandleFunc("DELETE /documents/{id}", s.handleRemoveDocument) + s.HandleFunc("/ws", s.handleWebSocket) + + return s +} + +func (s *Server) handleGetQueueEntryImage(w http.ResponseWriter, r *http.Request) { + var id QueueEntryID + if i, err := strconv.ParseInt(r.PathValue("id"), 10, 0); err != nil { + w.WriteHeader(http.StatusBadRequest) + msg := fmt.Sprintf("Failed to parse document id: %v.", err) + w.Write([]byte(msg)) + log.Print(msg) + return + } else { + id = QueueEntryID(i) + } + + s.Documents.Lock() + defer s.Documents.Unlock() + + entry := s.Documents.QueueEntryByID(id) + if entry == nil { + w.WriteHeader(http.StatusNotFound) + msg := fmt.Sprintf("Failed to find %T with ID %v.", entry, id) + w.Write([]byte(msg)) + log.Print(msg) + return + } + + page, ok := entry.QueueEntryData.(QueueEntryDataPage) + if !ok { + w.WriteHeader(http.StatusInternalServerError) + msg := fmt.Sprintf("Entry %d isn't a page.", entry.ID) + w.Write([]byte(msg)) + log.Print(msg) + return + } + + if page.Page == nil { + w.WriteHeader(http.StatusInternalServerError) + msg := fmt.Sprintf("Entry %d doesn't contain any page data.", entry.ID) + w.Write([]byte(msg)) + log.Print(msg) + return + } + + if page.Page.Image == nil { + w.WriteHeader(http.StatusInternalServerError) + msg := fmt.Sprintf("Page %d doesn't contain any image.", entry.ID) + w.Write([]byte(msg)) + 
log.Print(msg) + return + } + + w.Header().Set("Content-Type", "image/jpeg") + if err := jpeg.Encode(w, page.Page.Image, nil); err != nil { + w.WriteHeader(http.StatusInternalServerError) + msg := fmt.Sprintf("Failed to encode JPEG: %v.", err) + w.Write([]byte(msg)) + log.Print(msg) + return + } +} + +func (s *Server) handleGetQueueEntryPreview(w http.ResponseWriter, r *http.Request) { + var id QueueEntryID + if i, err := strconv.ParseInt(r.PathValue("id"), 10, 0); err != nil { + w.WriteHeader(http.StatusBadRequest) + msg := fmt.Sprintf("Failed to parse document id: %v.", err) + w.Write([]byte(msg)) + log.Print(msg) + return + } else { + id = QueueEntryID(i) + } + + s.Documents.Lock() + defer s.Documents.Unlock() + + entry := s.Documents.QueueEntryByID(id) + if entry == nil { + w.WriteHeader(http.StatusNotFound) + msg := fmt.Sprintf("Failed to find %T with ID %v.", entry, id) + w.Write([]byte(msg)) + log.Print(msg) + return + } + + page, ok := entry.QueueEntryData.(QueueEntryDataPage) + if !ok { + w.WriteHeader(http.StatusInternalServerError) + msg := fmt.Sprintf("Entry %d isn't a page.", entry.ID) + w.Write([]byte(msg)) + log.Print(msg) + return + } + + if page.Page == nil { + w.WriteHeader(http.StatusInternalServerError) + msg := fmt.Sprintf("Entry %d doesn't contain any page data.", entry.ID) + w.Write([]byte(msg)) + log.Print(msg) + return + } + + if page.Page.Image == nil { + w.WriteHeader(http.StatusInternalServerError) + msg := fmt.Sprintf("Page %d doesn't contain any image.", entry.ID) + w.Write([]byte(msg)) + log.Print(msg) + return + } + + // Resize image to a preview with a width of about 512 pixels. + img := resize.Resize(512, 0, page.Page.Image, resize.Lanczos2) + + w.Header().Set("Content-Type", "image/jpeg") + if err := jpeg.Encode(w, img, nil); err != nil { + w.WriteHeader(http.StatusInternalServerError) + msg := fmt.Sprintf("Failed to encode JPEG: %v.", err) + w.Write([]byte(msg)) + log.Print(msg) + return + } +} diff --git a/service/linux/systemd/ocrmypdf-runner.service b/service/linux/systemd/ocrmypdf-runner.service deleted file mode 100644 index ec1f53b..0000000 --- a/service/linux/systemd/ocrmypdf-runner.service +++ /dev/null @@ -1,13 +0,0 @@ -[Unit] -Description=A runner that will watch directories and runs OCRmyPDF on files in them. - -[Service] -Restart=on-failure -RestartSec=60s -WorkingDirectory=/home/paperless/ocrmypdf-runner/ -ExecStart=go run . -User=paperless -Group=paperless - -[Install] -WantedBy=multi-user.target diff --git a/service/linux/systemd/scanyonero.service b/service/linux/systemd/scanyonero.service new file mode 100644 index 0000000..45c519d --- /dev/null +++ b/service/linux/systemd/scanyonero.service @@ -0,0 +1,10 @@ +[Unit] +Description=A server that will receive scanned documents via FTP, process them and send them to paperless. + +[Service] +Restart=on-failure +RestartSec=60s +ExecStart=go run . + +[Install] +WantedBy=multi-user.target diff --git a/static/index.html b/static/index.html new file mode 100644 index 0000000..ab3ad56 --- /dev/null +++ b/static/index.html @@ -0,0 +1,25 @@ + + + + + + + Scanyonero + + + + + + +
+ + +
+ + + + + \ No newline at end of file diff --git a/static/js/api.js b/static/js/api.js new file mode 100644 index 0000000..e3d8a24 --- /dev/null +++ b/static/js/api.js @@ -0,0 +1,128 @@ +export class API extends EventTarget { + + constructor() { + super(); + + this.#socketOpen(); + } + + /** @type {WebSocket} */ + #socket; + + /** + * Opens a new websocket connection, and adds all necessary listeners. + */ + #socketOpen() { + this.#socket = new WebSocket("ws://" + location.host + "/ws"); + + this.#socket.onopen = event => { + console.log(`Websocket connection opened.`) + } + + this.#socket.onmessage = event => { + /** @type {{type: string, payload: any}} */ + const data = JSON.parse(event.data); + + switch (data.type) { + case "QueueDeleteAt": { + /** @type {CustomEventInit} */ + const eventData = { detail: data.payload }; + this.dispatchEvent(new CustomEvent("queuedeleteat", eventData)); + break; + } + case "QueueInsertAt": { + /** @type {CustomEventInit} */ + const eventData = { detail: data.payload }; + this.dispatchEvent(new CustomEvent("queueinsertat", eventData)); + break; + } + case "QueueReplace": { + /** @type {CustomEventInit} */ + const eventData = { detail: data.payload }; + this.dispatchEvent(new CustomEvent("queuereplace", eventData)); + break; + } + case "QueueShiftAt": { + /** @type {CustomEventInit} */ + const eventData = { detail: data.payload }; + this.dispatchEvent(new CustomEvent("queueshiftat", eventData)); + break; + } + default: + console.error(`Unknown websocket data type "${data.type}"`); + break; + } + } + + this.#socket.onclose = event => { + console.log(`Socket is closed. Reconnect will be attempted in 1 second. Code: ${event.code} Reason: ${event.reason} WasClean: ${event.wasClean}.`); + setTimeout(() => { + this.#socketOpen(); + }, 1000); + }; + + this.#socket.onerror = event => { + console.error(`Socket encountered error: ${event}. Closing socket.`); + this.#socket.close(); + }; + } + + /** + * Sends a document queue delete request to the server. + * @param {...number} ids Document ids. + */ + queueDelete(...ids) { + if (this.#socket.readyState !== WebSocket.OPEN) { + return + } + + /** @type {{type: string, payload: import("./model").APIPacketQueueDelete}} */ + const message = { type: "QueueDelete", payload: { ids: ids } }; + this.#socket.send(JSON.stringify(message)); + } + + /** + * Sends a document queue shift request to the server. + * @param {number} offset + * @param {...number} ids Document ids. 
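+     * A negative offset moves the documents up (towards the start of the queue), a positive one down.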
+ */ + queueShift(offset, ...ids) { + if (this.#socket.readyState !== WebSocket.OPEN) { + return + } + + /** @type {{type: string, payload: import("./model").APIPacketQueueShift}} */ + const message = { type: "QueueShift", payload: { offset: offset, ids: ids } }; + this.#socket.send(JSON.stringify(message)); + } + + /** + * + * @param {"GET"|"POST"|"DELETE"|"UPDATE"} method + * @param {string} url + */ + #ajaxRequest(method, url) { + return new Promise(function (resolve, reject) { + const xhr = new XMLHttpRequest(); + xhr.open(method, url); + xhr.onload = function () { + if (this.status >= 200 && this.status < 300) { + resolve(this.response); + } else { + reject({ + status: this.status, + statusText: this.statusText, + }); + } + }; + xhr.onerror = function () { + reject({ + status: this.status, + statusText: this.statusText, + }); + }; + xhr.send(); + }); + } + +} \ No newline at end of file diff --git a/static/js/app.js b/static/js/app.js new file mode 100644 index 0000000..8c66b0b --- /dev/null +++ b/static/js/app.js @@ -0,0 +1,25 @@ +import { API } from './api.js'; +import { DocumentMenu } from './components/document-menu.js'; +import { DocumentQueue } from './components/document-queue.js'; + +export class App { + /** @type {API} */ + #api; + + /** @type {DocumentMenu} */ + #documentMenu; + + /** @type {DocumentQueue} */ + #documentQueue; + + constructor() { + this.#api = new API(); + + this.#documentMenu = document.querySelector("#document-menu"); + this.#documentQueue = document.querySelector("#document-queue"); + + this.#documentMenu.documentQueue = this.#documentQueue; + this.#documentMenu.api = this.#api; + this.#documentQueue.api = this.#api; + } +} diff --git a/static/js/components/document-menu.js b/static/js/components/document-menu.js new file mode 100644 index 0000000..10019c5 --- /dev/null +++ b/static/js/components/document-menu.js @@ -0,0 +1,126 @@ +import { API } from '../api.js'; +import { LitElement, css, html } from '../vendor/lit-html/lit-all.min.js'; +import { DocumentQueue } from './document-queue.js'; + +export class DocumentMenu extends LitElement { + static properties = { + selectionAll: { type: Boolean }, + selectionIndeterminate: { type: Boolean }, + }; + + /** @type {API} */ + api; + + /** @type {DocumentQueue|undefined} */ + #documentQueue; + + /** + * @param {DocumentQueue} documentQueue + */ + set documentQueue(documentQueue) { + this.#documentQueue = documentQueue; + + this.#documentQueue.addEventListener("changeselection", /** @param {import('./document-queue.js').DocumentQueueEventChangeSelection} event */(event) => { + switch (event.detail.selectedIDs.length) { + case 0: + this.selectionAll = false; this.selectionIndeterminate = false; + break; + + case event.detail.allIDs.length: + this.selectionAll = true; this.selectionIndeterminate = false; + break; + + default: + this.selectionAll = false; this.selectionIndeterminate = true; + break; + } + }); + } + + constructor() { + super(); + this.selectionAll = false; + this.selectionIndeterminate = false; + } + + onCheckboxChange(event) { + switch (event.target.checked) { + case true: + this.selectionAll = true; this.selectionIndeterminate = false; + this.#documentQueue.selectAll(true); + break; + + default: + this.selectionAll = false; this.selectionIndeterminate = false; + this.#documentQueue.selectAll(false); + break; + } + } + + static styles = css` + :host { + display: flex; + padding: 8px; + + background-color: black; + } + + #select-all { + align-self: center; + margin-left: 8px; + margin-right: 16px; 
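+        /* Same 24px box as the per-entry selection checkboxes. */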
+ width: 24px; + height: 24px; + } + + #buttons { + display: flex; + flex-direction: row; + flex-wrap: wrap; + gap: 16px; + } + + button { + padding: 8px; + } + `; + + // @ts-ignore + render() { + return html` + +
+ + + + +
+ `; + } + + /** @param {Event} event */ + onButtonUpwards(event) { + if (this.api == undefined || this.#documentQueue == undefined) { return } + + const sInfo = this.#documentQueue.selectionInfo(); + this.api.queueShift(-1, ...sInfo.selectedIDs); + } + + /** @param {Event} event */ + onButtonDownwards(event) { + if (this.api == undefined || this.#documentQueue == undefined) { return } + + const sInfo = this.#documentQueue.selectionInfo(); + this.api.queueShift(1, ...sInfo.selectedIDs); + } + + /** @param {Event} event */ + onButtonDelete(event) { + if (this.api == undefined || this.#documentQueue == undefined) { return } + + const sInfo = this.#documentQueue.selectionInfo(); + this.api.queueDelete(...sInfo.selectedIDs); + } +} + +customElements.define("document-menu", DocumentMenu); diff --git a/static/js/components/document-queue-entry-page.js b/static/js/components/document-queue-entry-page.js new file mode 100644 index 0000000..a588d93 --- /dev/null +++ b/static/js/components/document-queue-entry-page.js @@ -0,0 +1,35 @@ +import { API } from '../api.js'; +import { LitElement, css, html, repeat } from '../vendor/lit-html/lit-all.min.js'; + +export class DocumentQueueEntryPage extends LitElement { + static properties = { + queueEntry: { type: Object }, + + api: { type: Object, state: true }, + }; + + constructor() { + super(); + + /** @type {API} */ + this.api; + /** @type {import('model').APIQueueEntry} */ + this.document; + } + + static styles = css` + img { + width: 128px; + } + `; + + // @ts-ignore + render() { + return html` + + This is a document + `; + } +} + +customElements.define("document-queue-entry-page", DocumentQueueEntryPage); diff --git a/static/js/components/document-queue-entry-separator.js b/static/js/components/document-queue-entry-separator.js new file mode 100644 index 0000000..ae7f44b --- /dev/null +++ b/static/js/components/document-queue-entry-separator.js @@ -0,0 +1,35 @@ +import { API } from '../api.js'; +import { LitElement, css, html, repeat } from '../vendor/lit-html/lit-all.min.js'; + +export class DocumentQueueEntrySeparator extends LitElement { + static properties = { + queueEntry: { type: Object }, + + api: { type: Object, state: true }, + }; + + constructor() { + super(); + + /** @type {API} */ + this.api; + /** @type {import('model').APIQueueEntry} */ + this.document; + } + + static styles = css` + :host { + width: 100%; + background: black; + } + `; + + // @ts-ignore + render() { + return html` + + `; + } +} + +customElements.define("document-queue-entry-separator", DocumentQueueEntrySeparator); diff --git a/static/js/components/document-queue-entry.js b/static/js/components/document-queue-entry.js new file mode 100644 index 0000000..10f4522 --- /dev/null +++ b/static/js/components/document-queue-entry.js @@ -0,0 +1,126 @@ +import { API } from '../api.js'; +import { LitElement, css, html, repeat } from '../vendor/lit-html/lit-all.min.js'; +import './document-queue-entry-page.js'; +import './document-queue-entry-separator.js'; + +/** @typedef {{selected: boolean}} DocumentQueueEntryEventChangeSelectionDetails */ +/** @typedef {CustomEvent} DocumentQueueEntryEventChangeSelection */ + +export class DocumentQueueEntry extends LitElement { + static properties = { + selected: { type: Boolean }, + queueEntry: { type: Object }, + + api: { type: Object, state: true }, + }; + + constructor() { + super(); + + this.selected = false; + /** @type {API} */ + this.api; + /** @type {import('model').APIQueueEntry} */ + this.queueEntry; + } + + static styles = css` + 
:host { + padding: 8px; + display: flex; + flex-direction: row; + gap: 8px; + background-color: rgba(0, 0, 0, 0.1); + border-radius: 8px; + } + + #left-bar { + position: relative; + display: flex; + flex-direction: column; + justify-content: center; + } + + #checkbox-selected { + align-self: center; + width: 24px; + height: 24px; + } + + #button-swap { + width: 32px; + height: 32px; + padding: 0px; + position: absolute; + bottom: 0px; + } + `; + + // @ts-ignore + render() { + let embeddedElement; + switch (this.queueEntry.type) { + case "Page": + embeddedElement = html``; break; + case "Separator": + embeddedElement = html``; break; + default: + embeddedElement = html`Unsupported entry type!` + } + + return html` +
+ + +
+ + ${embeddedElement} + `; + } + + /** @param {Event} event */ + onCheckboxChange(event) { + // @ts-ignore + this.selected = event.target.checked; + + /** @type {CustomEventInit} */ + const eventData = { detail: { selected: this.selected } }; + this.dispatchEvent(new CustomEvent("changeselection", eventData)); + } + + /** + * Used for FLIP animations. + * @type {DOMRect} + */ + #oldBoundingClientRect; + + prepareFLIP() { + this.#oldBoundingClientRect = this.getBoundingClientRect(); + } + + doFLIP() { + const oldRect = this.#oldBoundingClientRect; + if (oldRect == undefined) { + return; + } + + const newRect = this.getBoundingClientRect(); + + const deltaX = oldRect.left - newRect.left; + const deltaY = oldRect.top - newRect.top; + + if (Math.abs(deltaX) >= 1 || Math.abs(deltaY) >= 1) { + this.animate([{ + transform: `translate(${deltaX}px, ${deltaY}px)` + }, { + transform: 'none' + }], { + duration: 150, + easing: 'ease-out', + fill: 'both', + }); + } + } +} + +customElements.define("document-queue-entry", DocumentQueueEntry); diff --git a/static/js/components/document-queue.js b/static/js/components/document-queue.js new file mode 100644 index 0000000..1ce8b20 --- /dev/null +++ b/static/js/components/document-queue.js @@ -0,0 +1,169 @@ +import { API } from '../api.js'; +import { DocumentQueueEntry } from './document-queue-entry.js' + +/** @typedef {{selectedIDs: number[], allIDs: number[]}} DocumentQueueEventChangeSelectionDetails */ +/** @typedef {CustomEvent} DocumentQueueEventChangeSelection */ + +// TODO: Use LitElement, and use repeat directive, which also keeps the DOM state when shuffling elements around +export class DocumentQueue extends HTMLElement { + /** @type {API|undefined} */ + #api; + + /** @param {API} api */ + set api(api) { + this.#api = api; + + this.#api.addEventListener("queuedeleteat", /** @param {import("model").APIEvents["queuedeleteat"]} event */(event) => { + this.queueDeleteAt(event.detail.indexA, event.detail.indexB); + }); + this.#api.addEventListener("queueinsertat", /** @param {import("model").APIEvents["queueinsertat"]} event */(event) => { + this.queueInsertAt(event.detail.index, event.detail.documents); + }); + this.#api.addEventListener("queueshiftat", /** @param {import("model").APIEvents["queueshiftat"]} event */(event) => { + this.queueShiftAt(event.detail.index, event.detail.offset); + }); + this.#api.addEventListener("queuereplace", /** @param {import("model").APIEvents["queuereplace"]} event */(event) => { + this.queueReplace(event.detail.documents); + }); + } + + connectedCallback() { + this.style.display = "flex"; + this.style.gap = "8px"; + this.style.flexDirection = "column"; + this.style.padding = "8px"; + } + + selectionInfo() { + const children = Array.from(this.children); + + const result = {}; + result.selectedIDs = children.filter(/** @param {DocumentQueueEntry} value */ value => { return value.selected; }).map(/** @param {DocumentQueueEntry} value */(value) => { return value.queueEntry.id; }); + result.allIDs = children.map(/** @param {DocumentQueueEntry} value */ value => { return value.queueEntry.id; }); + + return result; + } + + updateSelection() { + /** @type {CustomEventInit} */ + const eventData = { detail: this.selectionInfo() }; + this.dispatchEvent(new CustomEvent("changeselection", eventData)); + } + + /** + * + * @param {boolean} state + */ + selectAll(state) { + const children = Array.from(this.children); + + children.forEach(/** @param {DocumentQueueEntry} child */ child => { + child.selected = state; + }); + } + + /** + 
* Deletes a range of documents. + * @param {number} indexA // Start index. + * @param {number} indexB // End index. (Not included in the range). + */ + queueDeleteAt(indexA, indexB) { + // Store positions. + Array.from(this.children).forEach(/** @param {DocumentQueueEntry} child */ child => { + child.prepareFLIP(); + }); + + if (this.hasChildNodes()) { + const children = this.children; + for (let i = indexA; i < indexB; i++) { + this.removeChild(children[i]); + } + } + + // Start FLIP animation. + Array.from(this.children).forEach(/** @param {DocumentQueueEntry} child */ child => { + child.doFLIP(); + }); + + this.updateSelection(); + } + + /** + * Inserts a range of documents at the given index. + * @param {number} index + * @param {import('model').APIQueueEntry[]} documents + */ + queueInsertAt(index, documents) { + // Store positions. + Array.from(this.children).forEach(/** @param {DocumentQueueEntry} child */ child => { + child.prepareFLIP(); + }); + + documents.forEach(document => { + if (this.hasChildNodes() || this.children.length === index) { + const newChild = this.appendChild(new DocumentQueueEntry()); + newChild.api = this.#api; + newChild.queueEntry = document; + newChild.addEventListener("changeselection", e => this.updateSelection()); + } else { + const newChild = this.insertBefore(new DocumentQueueEntry(), this.childNodes[index]); + newChild.api = this.#api; + newChild.queueEntry = document; + newChild.addEventListener("changeselection", e => this.updateSelection()); + } + index++; + }); + + // Start FLIP animation. + Array.from(this.children).forEach(/** @param {DocumentQueueEntry} child */ child => { + child.doFLIP(); + }); + + this.updateSelection(); + } + + /** + * Replaces all documents currently in the list/queue. + * @param {import('model').APIQueueEntry[]} documents + */ + queueReplace(documents) { + this.innerHTML = ""; + + documents.forEach(document => { + const newChild = this.appendChild(new DocumentQueueEntry()); + newChild.api = this.#api; + newChild.queueEntry = document; + newChild.addEventListener("changeselection", e => this.updateSelection()); + }); + + this.updateSelection(); + } + + /** + * Shifts a single document entry by the given offset. + * @param {number} index + * @param {number} offset + */ + queueShiftAt(index, offset) { + // Store positions. + Array.from(this.children).forEach(/** @param {DocumentQueueEntry} child */ child => { + child.prepareFLIP(); + }); + + const child = this.children[index]; + child.remove(); + const newIndex = index + offset; + if (!this.hasChildNodes() || this.children.length === newIndex) { + this.appendChild(child); + } else { + this.insertBefore(child, this.children[newIndex]); + } + + // Start FLIP animation. 
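+        // Each child compares its stored pre-move rect with its new position and animates the difference (First-Last-Invert-Play).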
+ Array.from(this.children).forEach(/** @param {DocumentQueueEntry} child */ child => { + child.doFLIP(); + }); + } +} + +customElements.define("document-queue", DocumentQueue); diff --git a/static/js/model.d.ts b/static/js/model.d.ts new file mode 100644 index 0000000..55e581c --- /dev/null +++ b/static/js/model.d.ts @@ -0,0 +1,41 @@ +export interface APIQueueEntry { + id: number; + name: string; + type: string; + page: string | undefined; +} + +export type APIPacketQueueDelete = { + ids: number[]; +} + +export type APIPacketQueueDeleteAt = { + indexA: number; + indexB: number; +} + +export type APIPacketQueueInsertAt = { + index: number; + documents: APIQueueEntry[]; +} + +export type APIPacketQueueReplace = { + documents: APIQueueEntry[]; +} + +export type APIPacketQueueShiftAt = { + index: number; + offset: number; +} + +export type APIPacketQueueShift = { + ids: number[]; + offset: number; +} + +export type APIEvents = { + queuedeleteat: CustomEvent; + queueinsertat: CustomEvent; + queuereplace: CustomEvent; + queueshiftat: CustomEvent; +} diff --git a/static/js/vendor/lit-html/lit-all.min.js b/static/js/vendor/lit-html/lit-all.min.js new file mode 100644 index 0000000..a3b000e --- /dev/null +++ b/static/js/vendor/lit-html/lit-all.min.js @@ -0,0 +1,120 @@ +/** + * @license + * Copyright 2019 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */ +const t=globalThis,s=t.ShadowRoot&&(void 0===t.ShadyCSS||t.ShadyCSS.nativeShadow)&&"adoptedStyleSheets"in Document.prototype&&"replace"in CSSStyleSheet.prototype,i=Symbol(),e=new WeakMap;class n{constructor(t,s,e){if(this._$cssResult$=!0,e!==i)throw Error("CSSResult is not constructable. Use `unsafeCSS` or `css` instead.");this.cssText=t,this.t=s}get styleSheet(){let t=this.i;const i=this.t;if(s&&void 0===t){const s=void 0!==i&&1===i.length;s&&(t=e.get(i)),void 0===t&&((this.i=t=new CSSStyleSheet).replaceSync(this.cssText),s&&e.set(i,t))}return t}toString(){return this.cssText}}const r=t=>new n("string"==typeof t?t:t+"",void 0,i),o=(t,...s)=>{const e=1===t.length?t[0]:s.reduce(((s,i,e)=>s+(t=>{if(!0===t._$cssResult$)return t.cssText;if("number"==typeof t)return t;throw Error("Value passed to 'css' function must be a 'css' function result: "+t+". 
Use 'unsafeCSS' to pass non-literal values, but take care to ensure page security.")})(i)+t[e+1]),t[0]);return new n(e,t,i)},h=(i,e)=>{if(s)i.adoptedStyleSheets=e.map((t=>t instanceof CSSStyleSheet?t:t.styleSheet));else for(const s of e){const e=document.createElement("style"),n=t.litNonce;void 0!==n&&e.setAttribute("nonce",n),e.textContent=s.cssText,i.appendChild(e)}},c=s?t=>t:t=>t instanceof CSSStyleSheet?(t=>{let s="";for(const i of t.cssRules)s+=i.cssText;return r(s)})(t):t +/** + * @license + * Copyright 2017 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */,{is:l,defineProperty:a,getOwnPropertyDescriptor:u,getOwnPropertyNames:d,getOwnPropertySymbols:f,getPrototypeOf:p}=Object,v=globalThis,y=v.trustedTypes,m=y?y.emptyScript:"",b=v.reactiveElementPolyfillSupport,g=(t,s)=>t,w={toAttribute(t,s){switch(s){case Boolean:t=t?m:null;break;case Object:case Array:t=null==t?t:JSON.stringify(t)}return t},fromAttribute(t,s){let i=t;switch(s){case Boolean:i=null!==t;break;case Number:i=null===t?null:Number(t);break;case Object:case Array:try{i=JSON.parse(t)}catch(t){i=null}}return i}},_=(t,s)=>!l(t,s),S={attribute:!0,type:String,converter:w,reflect:!1,useDefault:!1,hasChanged:_};Symbol.metadata??=Symbol("metadata"),v.litPropertyMetadata??=new WeakMap;class $ extends HTMLElement{static addInitializer(t){this.o(),(this.l??=[]).push(t)}static get observedAttributes(){return this.finalize(),this.u&&[...this.u.keys()]}static createProperty(t,s=S){if(s.state&&(s.attribute=!1),this.o(),this.prototype.hasOwnProperty(t)&&((s=Object.create(s)).wrapped=!0),this.elementProperties.set(t,s),!s.noAccessor){const i=Symbol(),e=this.getPropertyDescriptor(t,i,s);void 0!==e&&a(this.prototype,t,e)}}static getPropertyDescriptor(t,s,i){const{get:e,set:n}=u(this.prototype,t)??{get(){return this[s]},set(t){this[s]=t}};return{get:e,set(s){const r=e?.call(this);n?.call(this,s),this.requestUpdate(t,r,i)},configurable:!0,enumerable:!0}}static getPropertyOptions(t){return this.elementProperties.get(t)??S}static o(){if(this.hasOwnProperty(g("elementProperties")))return;const t=p(this);t.finalize(),void 0!==t.l&&(this.l=[...t.l]),this.elementProperties=new Map(t.elementProperties)}static finalize(){if(this.hasOwnProperty(g("finalized")))return;if(this.finalized=!0,this.o(),this.hasOwnProperty(g("properties"))){const t=this.properties,s=[...d(t),...f(t)];for(const i of s)this.createProperty(i,t[i])}const t=this[Symbol.metadata];if(null!==t){const s=litPropertyMetadata.get(t);if(void 0!==s)for(const[t,i]of s)this.elementProperties.set(t,i)}this.u=new Map;for(const[t,s]of this.elementProperties){const i=this.p(t,s);void 0!==i&&this.u.set(i,t)}this.elementStyles=this.finalizeStyles(this.styles)}static finalizeStyles(t){const s=[];if(Array.isArray(t)){const i=new Set(t.flat(1/0).reverse());for(const t of i)s.unshift(c(t))}else void 0!==t&&s.push(c(t));return s}static p(t,s){const i=s.attribute;return!1===i?void 0:"string"==typeof i?i:"string"==typeof t?t.toLowerCase():void 0}constructor(){super(),this.v=void 0,this.isUpdatePending=!1,this.hasUpdated=!1,this.m=null,this._()}_(){this.S=new Promise((t=>this.enableUpdating=t)),this._$AL=new Map,this.$(),this.requestUpdate(),this.constructor.l?.forEach((t=>t(this)))}addController(t){(this.P??=new Set).add(t),void 0!==this.renderRoot&&this.isConnected&&t.hostConnected?.()}removeController(t){this.P?.delete(t)}$(){const t=new Map,s=this.constructor.elementProperties;for(const i of s.keys())this.hasOwnProperty(i)&&(t.set(i,this[i]),delete 
this[i]);t.size>0&&(this.v=t)}createRenderRoot(){const t=this.shadowRoot??this.attachShadow(this.constructor.shadowRootOptions);return h(t,this.constructor.elementStyles),t}connectedCallback(){this.renderRoot??=this.createRenderRoot(),this.enableUpdating(!0),this.P?.forEach((t=>t.hostConnected?.()))}enableUpdating(t){}disconnectedCallback(){this.P?.forEach((t=>t.hostDisconnected?.()))}attributeChangedCallback(t,s,i){this._$AK(t,i)}C(t,s){const i=this.constructor.elementProperties.get(t),e=this.constructor.p(t,i);if(void 0!==e&&!0===i.reflect){const n=(void 0!==i.converter?.toAttribute?i.converter:w).toAttribute(s,i.type);this.m=t,null==n?this.removeAttribute(e):this.setAttribute(e,n),this.m=null}}_$AK(t,s){const i=this.constructor,e=i.u.get(t);if(void 0!==e&&this.m!==e){const t=i.getPropertyOptions(e),n="function"==typeof t.converter?{fromAttribute:t.converter}:void 0!==t.converter?.fromAttribute?t.converter:w;this.m=e,this[e]=n.fromAttribute(s,t.type)??this.T?.get(e)??null,this.m=null}}requestUpdate(t,s,i){if(void 0!==t){const e=this.constructor,n=this[t];if(i??=e.getPropertyOptions(t),!((i.hasChanged??_)(n,s)||i.useDefault&&i.reflect&&n===this.T?.get(t)&&!this.hasAttribute(e.p(t,i))))return;this.M(t,s,i)}!1===this.isUpdatePending&&(this.S=this.k())}M(t,s,{useDefault:i,reflect:e,wrapped:n},r){i&&!(this.T??=new Map).has(t)&&(this.T.set(t,r??s??this[t]),!0!==n||void 0!==r)||(this._$AL.has(t)||(this.hasUpdated||i||(s=void 0),this._$AL.set(t,s)),!0===e&&this.m!==t&&(this.A??=new Set).add(t))}async k(){this.isUpdatePending=!0;try{await this.S}catch(t){Promise.reject(t)}const t=this.scheduleUpdate();return null!=t&&await t,!this.isUpdatePending}scheduleUpdate(){return this.performUpdate()}performUpdate(){if(!this.isUpdatePending)return;if(!this.hasUpdated){if(this.renderRoot??=this.createRenderRoot(),this.v){for(const[t,s]of this.v)this[t]=s;this.v=void 0}const t=this.constructor.elementProperties;if(t.size>0)for(const[s,i]of t){const{wrapped:t}=i,e=this[s];!0!==t||this._$AL.has(s)||void 0===e||this.M(s,void 0,i,e)}}let t=!1;const s=this._$AL;try{t=this.shouldUpdate(s),t?(this.willUpdate(s),this.P?.forEach((t=>t.hostUpdate?.())),this.update(s)):this.U()}catch(s){throw t=!1,this.U(),s}t&&this._$AE(s)}willUpdate(t){}_$AE(t){this.P?.forEach((t=>t.hostUpdated?.())),this.hasUpdated||(this.hasUpdated=!0,this.firstUpdated(t)),this.updated(t)}U(){this._$AL=new Map,this.isUpdatePending=!1}get updateComplete(){return this.getUpdateComplete()}getUpdateComplete(){return this.S}shouldUpdate(t){return!0}update(t){this.A&&=this.A.forEach((t=>this.C(t,this[t]))),this.U()}updated(t){}firstUpdated(t){}}$.elementStyles=[],$.shadowRootOptions={mode:"open"},$[g("elementProperties")]=new Map,$[g("finalized")]=new Map,b?.({ReactiveElement:$}),(v.reactiveElementVersions??=[]).push("2.1.0"); +/** + * @license + * Copyright 2017 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */ +const T=globalThis,x=T.trustedTypes,E=x?x.createPolicy("lit-html",{createHTML:t=>t}):void 0,C="$lit$",P=`lit$${Math.random().toFixed(9).slice(2)}$`,M="?"+P,A=`<${M}>`,k=document,O=()=>k.createComment(""),U=t=>null===t||"object"!=typeof t&&"function"!=typeof t,V=Array.isArray,R=t=>V(t)||"function"==typeof t?.[Symbol.iterator],N="[ \t\n\f\r]",L=/<(?:(!--|\/[^a-zA-Z])|(\/?[a-zA-Z][^>\s]*)|(\/?$))/g,z=/-->/g,j=/>/g,D=RegExp(`>|${N}(?:([^\\s"'>=/]+)(${N}*=${N}*(?:[^ 
\t\n\f\r"'\`<>=]|("|')|))|$)`,"g"),H=/'/g,I=/"/g,B=/^(?:script|style|textarea|title)$/i,W=t=>(s,...i)=>({_$litType$:t,strings:s,values:i}),Z=W(1),q=W(2),F=W(3),G=Symbol.for("lit-noChange"),J=Symbol.for("lit-nothing"),K=new WeakMap,Y=k.createTreeWalker(k,129);function Q(t,s){if(!V(t)||!t.hasOwnProperty("raw"))throw Error("invalid template strings array");return void 0!==E?E.createHTML(s):s}const X=(t,s)=>{const i=t.length-1,e=[];let n,r=2===s?"":3===s?"":"",o=L;for(let s=0;s"===c[0]?(o=n??L,l=-1):void 0===c[1]?l=-2:(l=o.lastIndex-c[2].length,h=c[1],o=void 0===c[3]?D:'"'===c[3]?I:H):o===I||o===H?o=D:o===z||o===j?o=L:(o=D,n=void 0);const u=o===D&&t[s+1].startsWith("/>")?" ":"";r+=o===L?i+A:l>=0?(e.push(h),i.slice(0,l)+C+i.slice(l)+P+u):i+P+(-2===l?s:u)}return[Q(t,r+(t[i]||"")+(2===s?"":3===s?"":"")),e]};class tt{constructor({strings:t,_$litType$:s},i){let e;this.parts=[];let n=0,r=0;const o=t.length-1,h=this.parts,[c,l]=X(t,s);if(this.el=tt.createElement(c,i),Y.currentNode=this.el.content,2===s||3===s){const t=this.el.content.firstChild;t.replaceWith(...t.childNodes)}for(;null!==(e=Y.nextNode())&&h.length0){e.textContent=x?x.emptyScript:"";for(let i=0;i2||""!==i[0]||""!==i[1]?(this._$AH=Array(i.length-1).fill(new String),this.strings=i):this._$AH=J}_$AI(t,s=this,i,e){const n=this.strings;let r=!1;if(void 0===n)t=st(this,t,s,0),r=!U(t)||t!==this._$AH&&t!==G,r&&(this._$AH=t);else{const e=t;let o,h;for(t=n[0],o=0;o{const e=i?.renderBefore??s;let n=e._$litPart$;if(void 0===n){const t=i?.renderBefore??null;e._$litPart$=n=new et(s.insertBefore(O(),t),t,void 0,i??{})}return n._$AI(t),n},dt=globalThis; +/** + * @license + * Copyright 2017 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */class ft extends ${constructor(){super(...arguments),this.renderOptions={host:this},this.rt=void 0}createRenderRoot(){const t=super.createRenderRoot();return this.renderOptions.renderBefore??=t.firstChild,t}update(t){const s=this.render();this.hasUpdated||(this.renderOptions.isConnected=this.isConnected),super.update(t),this.rt=ut(s,this.renderRoot,this.renderOptions)}connectedCallback(){super.connectedCallback(),this.rt?.setConnected(!0)}disconnectedCallback(){super.disconnectedCallback(),this.rt?.setConnected(!1)}render(){return G}}ft._$litElement$=!0,ft["finalized"]=!0,dt.litElementHydrateSupport?.({LitElement:ft});const pt=dt.litElementPolyfillSupport;pt?.({LitElement:ft});const vt={_$AK:(t,s,i)=>{t._$AK(s,i)},_$AL:t=>t._$AL};(dt.litElementVersions??=[]).push("4.2.0"); +/** + * @license + * Copyright 2022 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */ +const yt=!1,{tt:mt}=lt,bt=t=>null===t||"object"!=typeof t&&"function"!=typeof t,gt={HTML:1,SVG:2,MATHML:3},wt=(t,s)=>void 0===s?void 0!==t?._$litType$:t?._$litType$===s,_t=t=>null!=t?._$litType$?.h,St=t=>void 0!==t?._$litDirective$,$t=t=>t?._$litDirective$,Tt=t=>void 0===t.strings,xt=()=>document.createComment(""),Et=(t,s,i)=>{const e=t._$AA.parentNode,n=void 0===s?t._$AB:s._$AA;if(void 0===i){const s=e.insertBefore(xt(),n),r=e.insertBefore(xt(),n);i=new mt(s,r,t,t.options)}else{const s=i._$AB.nextSibling,r=i._$AM,o=r!==t;if(o){let s;i._$AQ?.(t),i._$AM=t,void 0!==i._$AP&&(s=t._$AU)!==r._$AU&&i._$AP(s)}if(s!==n||o){let t=i._$AA;for(;t!==s;){const s=t.nextSibling;e.insertBefore(t,n),t=s}}}return i},Ct=(t,s,i=t)=>(t._$AI(s,i),t),Pt={},Mt=(t,s=Pt)=>t._$AH=s,At=t=>t._$AH,kt=t=>{t._$AP?.(!1,!0);let s=t._$AA;const i=t._$AB.nextSibling;for(;s!==i;){const 
t=s.nextSibling;s.remove(),s=t}},Ot=t=>{t._$AR()},Ut={ATTRIBUTE:1,CHILD:2,PROPERTY:3,BOOLEAN_ATTRIBUTE:4,EVENT:5,ELEMENT:6},Vt=t=>(...s)=>({_$litDirective$:t,values:s});class Rt{constructor(t){}get _$AU(){return this._$AM._$AU}_$AT(t,s,i){this.nt=t,this._$AM=s,this.ct=i}_$AS(t,s){return this.update(t,s)}update(t,s){return this.render(...s)}} +/** + * @license + * Copyright 2017 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */const Nt=(t,s)=>{const i=t._$AN;if(void 0===i)return!1;for(const t of i)t._$AO?.(s,!1),Nt(t,s);return!0},Lt=t=>{let s,i;do{if(void 0===(s=t._$AM))break;i=s._$AN,i.delete(t),t=s}while(0===i?.size)},zt=t=>{for(let s;s=t._$AM;t=s){let i=s._$AN;if(void 0===i)s._$AN=i=new Set;else if(i.has(t))break;i.add(t),Ht(s)}};function jt(t){void 0!==this._$AN?(Lt(this),this._$AM=t,zt(this)):this._$AM=t}function Dt(t,s=!1,i=0){const e=this._$AH,n=this._$AN;if(void 0!==n&&0!==n.size)if(s)if(Array.isArray(e))for(let t=i;t{2==t.type&&(t._$AP??=Dt,t._$AQ??=jt)};class It extends Rt{constructor(){super(...arguments),this._$AN=void 0}_$AT(t,s,i){super._$AT(t,s,i),zt(this),this.isConnected=t._$AU}_$AO(t,s=!0){t!==this.isConnected&&(this.isConnected=t,t?this.reconnected?.():this.disconnected?.()),s&&(Nt(this,t),Lt(this))}setValue(t){if(Tt(this.nt))this.nt._$AI(t,this);else{const s=[...this.nt._$AH];s[this.ct]=t,this.nt._$AI(s,this,0)}}disconnected(){}reconnected(){}} +/** + * @license + * Copyright 2021 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */class Bt{constructor(t){this.lt=t}disconnect(){this.lt=void 0}reconnect(t){this.lt=t}deref(){return this.lt}}class Wt{constructor(){this.ut=void 0,this.dt=void 0}get(){return this.ut}pause(){this.ut??=new Promise((t=>this.dt=t))}resume(){this.dt?.(),this.ut=this.dt=void 0}} +/** + * @license + * Copyright 2017 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */class Zt extends It{constructor(){super(...arguments),this.ft=new Bt(this),this.vt=new Wt}render(t,s){return G}update(t,[s,i]){if(this.isConnected||this.disconnected(),s===this.yt)return G;this.yt=s;let e=0;const{ft:n,vt:r}=this;return(async(t,s)=>{for await(const i of t)if(!1===await s(i))return})(s,(async t=>{for(;r.get();)await r.get();const o=n.deref();if(void 0!==o){if(o.yt!==s)return!1;void 0!==i&&(t=i(t,e)),o.commitValue(t,e),e++}return!0})),G}commitValue(t,s){this.setValue(t)}disconnected(){this.ft.disconnect(),this.vt.pause()}reconnected(){this.ft.reconnect(this),this.vt.resume()}}const qt=Vt(Zt),Ft=Vt( +/** + * @license + * Copyright 2017 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */ +class extends Zt{constructor(t){if(super(t),2!==t.type)throw Error("asyncAppend can only be used in child expressions")}update(t,s){return this.rt=t,super.update(t,s)}commitValue(t,s){0===s&&Ot(this.rt);const i=Et(this.rt);Ct(i,t)}}),Gt=t=>_t(t)?t._$litType$.h:t.strings,Jt=Vt(class extends Rt{constructor(t){super(t),this.bt=new WeakMap}render(t){return[t]}update(t,[s]){const i=wt(this.gt)?Gt(this.gt):null,e=wt(s)?Gt(s):null;if(null!==i&&(null===e||i!==e)){const s=At(t).pop();let e=this.bt.get(i);if(void 0===e){const t=document.createDocumentFragment();e=ut(J,t),e.setConnected(!1),this.bt.set(i,e)}Mt(e,[s]),Et(e,void 0,s)}if(null!==e){if(null===i||i!==e){const s=this.bt.get(e);if(void 0!==s){const i=At(s).pop();Ot(t),Et(t,void 0,i),Mt(t,[i])}}this.gt=s}else this.gt=void 0;return this.render(s)}}),Kt=(t,s,i)=>{for(const i of s)if(i[0]===t)return(0,i[1])();return i?.()},Yt=Vt( +/** + * @license + * Copyright 2018 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */ 
+class extends Rt{constructor(t){if(super(t),1!==t.type||"class"!==t.name||t.strings?.length>2)throw Error("`classMap()` can only be used in the `class` attribute and must be the only part in the attribute.")}render(t){return" "+Object.keys(t).filter((s=>t[s])).join(" ")+" "}update(t,[s]){if(void 0===this.wt){this.wt=new Set,void 0!==t.strings&&(this._t=new Set(t.strings.join(" ").split(/\s/).filter((t=>""!==t))));for(const t in s)s[t]&&!this._t?.has(t)&&this.wt.add(t);return this.render(s)}const i=t.element.classList;for(const t of this.wt)t in s||(i.remove(t),this.wt.delete(t));for(const t in s){const e=!!s[t];e===this.wt.has(t)||this._t?.has(t)||(e?(i.add(t),this.wt.add(t)):(i.remove(t),this.wt.delete(t)))}return G}}),Qt={},Xt=Vt(class extends Rt{constructor(){super(...arguments),this.St=Qt}render(t,s){return s()}update(t,[s,i]){if(Array.isArray(s)){if(Array.isArray(this.St)&&this.St.length===s.length&&s.every(((t,s)=>t===this.St[s])))return G}else if(this.St===s)return G;return this.St=Array.isArray(s)?Array.from(s):s,this.render(s,i)}}),ts=t=>t??J +/** + * @license + * Copyright 2021 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */;function*ss(t,s){const i="function"==typeof s;if(void 0!==t){let e=-1;for(const n of t)e>-1&&(yield i?s(e):s),e++,yield n}} +/** + * @license + * Copyright 2021 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */const is=Vt(class extends Rt{constructor(){super(...arguments),this.key=J}render(t,s){return this.key=t,s}update(t,[s,i]){return s!==this.key&&(Mt(t),this.key=s),i}}),es=Vt( +/** + * @license + * Copyright 2020 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */ +class extends Rt{constructor(t){if(super(t),3!==t.type&&1!==t.type&&4!==t.type)throw Error("The `live` directive is not allowed on child or event bindings");if(!Tt(t))throw Error("`live` bindings can only contain a single expression")}render(t){return t}update(t,[s]){if(s===G||s===J)return s;const i=t.element,e=t.name;if(3===t.type){if(s===i[e])return G}else if(4===t.type){if(!!s===i.hasAttribute(e))return G}else if(1===t.type&&i.getAttribute(e)===s+"")return G;return Mt(t),s}}); +/** + * @license + * Copyright 2021 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */ +function*ns(t,s){if(void 0!==t){let i=0;for(const e of t)yield s(e,i++)}} +/** + * @license + * Copyright 2021 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */function*rs(t,s,i=1){const e=void 0===s?0:t;s??=t;for(let t=e;i>0?tnew hs;class hs{}const cs=new WeakMap,ls=Vt(class extends It{render(t){return J}update(t,[s]){const i=s!==this.lt;return i&&void 0!==this.lt&&this.$t(void 0),(i||this.Tt!==this.xt)&&(this.lt=s,this.Et=t.options?.host,this.$t(this.xt=t.element)),J}$t(t){if(this.isConnected||(t=void 0),"function"==typeof this.lt){const s=this.Et??globalThis;let i=cs.get(s);void 0===i&&(i=new WeakMap,cs.set(s,i)),void 0!==i.get(this.lt)&&this.lt.call(this.Et,void 0),i.set(this.lt,t),void 0!==t&&this.lt.call(this.Et,t)}else this.lt.value=t}get Tt(){return"function"==typeof this.lt?cs.get(this.Et??globalThis)?.get(this.lt):this.lt?.value}disconnected(){this.Tt===this.xt&&this.$t(void 0)}reconnected(){this.$t(this.xt)}}),as=(t,s,i)=>{const e=new Map;for(let n=s;n<=i;n++)e.set(t[n],n);return e},us=Vt(class extends Rt{constructor(t){if(super(t),2!==t.type)throw Error("repeat() can only be used in text expressions")}Ct(t,s,i){let e;void 0===i?i=s:void 0!==s&&(e=s);const n=[],r=[];let o=0;for(const s of t)n[o]=e?e(s,o):o,r[o]=i(s,o),o++;return{values:r,keys:n}}render(t,s,i){return 
this.Ct(t,s,i).values}update(t,[s,i,e]){const n=At(t),{values:r,keys:o}=this.Ct(s,i,e);if(!Array.isArray(n))return this.Pt=o,r;const h=this.Pt??=[],c=[];let l,a,u=0,d=n.length-1,f=0,p=r.length-1;for(;u<=d&&f<=p;)if(null===n[u])u++;else if(null===n[d])d--;else if(h[u]===o[f])c[f]=Ct(n[u],r[f]),u++,f++;else if(h[d]===o[p])c[p]=Ct(n[d],r[p]),d--,p--;else if(h[u]===o[p])c[p]=Ct(n[u],r[p]),Et(t,c[p+1],n[u]),u++,p--;else if(h[d]===o[f])c[f]=Ct(n[d],r[f]),Et(t,n[u],n[d]),d--,f++;else if(void 0===l&&(l=as(o,f,p),a=as(h,u,d)),l.has(h[u]))if(l.has(h[d])){const s=a.get(o[f]),i=void 0!==s?n[s]:null;if(null===i){const s=Et(t,n[u]);Ct(s,r[f]),c[f]=s}else c[f]=Ct(i,r[f]),Et(t,n[u],i),n[s]=null;f++}else kt(n[d]),d--;else kt(n[u]),u++;for(;f<=p;){const s=Et(t,c[p+1]);Ct(s,r[f]),c[f++]=s}for(;u<=d;){const t=n[u++];null!==t&&kt(t)}return this.Pt=o,Mt(t,c),G}}),ds="important",fs=" !"+ds,ps=Vt(class extends Rt{constructor(t){if(super(t),1!==t.type||"style"!==t.name||t.strings?.length>2)throw Error("The `styleMap` directive must be used in the `style` attribute and must be the only part in the attribute.")}render(t){return Object.keys(t).reduce(((s,i)=>{const e=t[i];return null==e?s:s+`${i=i.includes("-")?i:i.replace(/(?:^(webkit|moz|ms|o)|)(?=[A-Z])/g,"-$&").toLowerCase()}:${e};`}),"")}update(t,[s]){const{style:i}=t.element;if(void 0===this.Mt)return this.Mt=new Set(Object.keys(s)),this.render(s);for(const t of this.Mt)null==s[t]&&(this.Mt.delete(t),t.includes("-")?i.removeProperty(t):i[t]=null);for(const t in s){const e=s[t];if(null!=e){this.Mt.add(t);const s="string"==typeof e&&e.endsWith(fs);t.includes("-")||s?i.setProperty(t,s?e.slice(0,-11):e,s?ds:""):i[t]=e}}return G}}),vs=Vt( +/** + * @license + * Copyright 2020 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */ +class extends Rt{constructor(t){if(super(t),2!==t.type)throw Error("templateContent can only be used in child bindings")}render(t){return this.At===t?G:(this.At=t,document.importNode(t.content,!0))}});class ys extends Rt{constructor(t){if(super(t),this.gt=J,2!==t.type)throw Error(this.constructor.directiveName+"() can only be used in child bindings")}render(t){if(t===J||null==t)return this.kt=void 0,this.gt=t;if(t===G)return t;if("string"!=typeof t)throw Error(this.constructor.directiveName+"() called with a non-string value");if(t===this.gt)return this.kt;this.gt=t;const s=[t];return s.raw=s,this.kt={_$litType$:this.constructor.resultType,strings:s,values:[]}}}ys.directiveName="unsafeHTML",ys.resultType=1;const ms=Vt(ys); +/** + * @license + * Copyright 2017 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */class bs extends ys{}bs.directiveName="unsafeSVG",bs.resultType=2;const gs=Vt(bs),ws=t=>!bt(t)&&"function"==typeof t.then,_s=1073741823; +/** + * @license + * Copyright 2017 Google LLC + * SPDX-License-Identifier: BSD-3-Clause + */class Ss extends It{constructor(){super(...arguments),this.Ot=_s,this.Ut=[],this.ft=new Bt(this),this.vt=new Wt}render(...t){return t.find((t=>!ws(t)))??G}update(t,s){const i=this.Ut;let e=i.length;this.Ut=s;const n=this.ft,r=this.vt;this.isConnected||this.disconnected();for(let t=0;tthis.Ot);t++){const o=s[t];if(!ws(o))return this.Ot=t,o;t{for(;r.get();)await r.get();const s=n.deref();if(void 0!==s){const i=s.Ut.indexOf(o);i>-1&&i{if(t?.r===xs)return t?._$litStatic$},Cs=t=>({_$litStatic$:t,r:xs}),Ps=(t,...s)=>({_$litStatic$:s.reduce(((s,i,e)=>s+(t=>{if(void 0!==t._$litStatic$)return t._$litStatic$;throw Error(`Value passed to 'literal' function must be a 'literal' result: ${t}. 
Use 'unsafeStatic' to pass non-literal values, but\n take care to ensure page security.`)})(i)+t[e+1]),t[0]),r:xs}),Ms=new Map,As=t=>(s,...i)=>{const e=i.length;let n,r;const o=[],h=[];let c,l=0,a=!1;for(;l;\n\n/**\n * A single CSSResult, CSSStyleSheet, or an array or nested arrays of those.\n */\nexport type CSSResultGroup = CSSResultOrNative | CSSResultArray;\n\nconst constructionToken = Symbol();\n\nconst cssTagCache = new WeakMap();\n\n/**\n * A container for a string of CSS text, that may be used to create a CSSStyleSheet.\n *\n * CSSResult is the return value of `css`-tagged template literals and\n * `unsafeCSS()`. In order to ensure that CSSResults are only created via the\n * `css` tag and `unsafeCSS()`, CSSResult cannot be constructed directly.\n */\nexport class CSSResult {\n // This property needs to remain unminified.\n ['_$cssResult$'] = true;\n readonly cssText: string;\n private _styleSheet?: CSSStyleSheet;\n private _strings: TemplateStringsArray | undefined;\n\n private constructor(\n cssText: string,\n strings: TemplateStringsArray | undefined,\n safeToken: symbol\n ) {\n if (safeToken !== constructionToken) {\n throw new Error(\n 'CSSResult is not constructable. Use `unsafeCSS` or `css` instead.'\n );\n }\n this.cssText = cssText;\n this._strings = strings;\n }\n\n // This is a getter so that it's lazy. In practice, this means stylesheets\n // are not created until the first element instance is made.\n get styleSheet(): CSSStyleSheet | undefined {\n // If `supportsAdoptingStyleSheets` is true then we assume CSSStyleSheet is\n // constructable.\n let styleSheet = this._styleSheet;\n const strings = this._strings;\n if (supportsAdoptingStyleSheets && styleSheet === undefined) {\n const cacheable = strings !== undefined && strings.length === 1;\n if (cacheable) {\n styleSheet = cssTagCache.get(strings);\n }\n if (styleSheet === undefined) {\n (this._styleSheet = styleSheet = new CSSStyleSheet()).replaceSync(\n this.cssText\n );\n if (cacheable) {\n cssTagCache.set(strings, styleSheet);\n }\n }\n }\n return styleSheet;\n }\n\n toString(): string {\n return this.cssText;\n }\n}\n\ntype ConstructableCSSResult = CSSResult & {\n new (\n cssText: string,\n strings: TemplateStringsArray | undefined,\n safeToken: symbol\n ): CSSResult;\n};\n\nconst textFromCSSResult = (value: CSSResultGroup | number) => {\n // This property needs to remain unminified.\n if ((value as CSSResult)['_$cssResult$'] === true) {\n return (value as CSSResult).cssText;\n } else if (typeof value === 'number') {\n return value;\n } else {\n throw new Error(\n `Value passed to 'css' function must be a 'css' function result: ` +\n `${value}. Use 'unsafeCSS' to pass non-literal values, but take care ` +\n `to ensure page security.`\n );\n }\n};\n\n/**\n * Wrap a value for interpolation in a {@linkcode css} tagged template literal.\n *\n * This is unsafe because untrusted CSS text can be used to phone home\n * or exfiltrate data to an attacker controlled site. Take care to only use\n * this with trusted input.\n */\nexport const unsafeCSS = (value: unknown) =>\n new (CSSResult as ConstructableCSSResult)(\n typeof value === 'string' ? value : String(value),\n undefined,\n constructionToken\n );\n\n/**\n * A template literal tag which can be used with LitElement's\n * {@linkcode LitElement.styles} property to set element styles.\n *\n * For security reasons, only literal string values and number may be used in\n * embedded expressions. 
To incorporate non-literal values {@linkcode unsafeCSS}\n * may be used inside an expression.\n */\nexport const css = (\n strings: TemplateStringsArray,\n ...values: (CSSResultGroup | number)[]\n): CSSResult => {\n const cssText =\n strings.length === 1\n ? strings[0]\n : values.reduce(\n (acc, v, idx) => acc + textFromCSSResult(v) + strings[idx + 1],\n strings[0]\n );\n return new (CSSResult as ConstructableCSSResult)(\n cssText,\n strings,\n constructionToken\n );\n};\n\n/**\n * Applies the given styles to a `shadowRoot`. When Shadow DOM is\n * available but `adoptedStyleSheets` is not, styles are appended to the\n * `shadowRoot` to [mimic the native feature](https://developer.mozilla.org/en-US/docs/Web/API/ShadowRoot/adoptedStyleSheets).\n * Note, when shimming is used, any styles that are subsequently placed into\n * the shadowRoot should be placed *before* any shimmed adopted styles. This\n * will match spec behavior that gives adopted sheets precedence over styles in\n * shadowRoot.\n */\nexport const adoptStyles = (\n renderRoot: ShadowRoot,\n styles: Array\n) => {\n if (supportsAdoptingStyleSheets) {\n (renderRoot as ShadowRoot).adoptedStyleSheets = styles.map((s) =>\n s instanceof CSSStyleSheet ? s : s.styleSheet!\n );\n } else {\n for (const s of styles) {\n const style = document.createElement('style');\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const nonce = (global as any)['litNonce'];\n if (nonce !== undefined) {\n style.setAttribute('nonce', nonce);\n }\n style.textContent = (s as CSSResult).cssText;\n renderRoot.appendChild(style);\n }\n }\n};\n\nconst cssResultFromStyleSheet = (sheet: CSSStyleSheet) => {\n let cssText = '';\n for (const rule of sheet.cssRules) {\n cssText += rule.cssText;\n }\n return unsafeCSS(cssText);\n};\n\nexport const getCompatibleStyle =\n supportsAdoptingStyleSheets ||\n (NODE_MODE && global.CSSStyleSheet === undefined)\n ? (s: CSSResultOrNative) => s\n : (s: CSSResultOrNative) =>\n s instanceof CSSStyleSheet ? cssResultFromStyleSheet(s) : s;\n","/**\n * @license\n * Copyright 2017 Google LLC\n * SPDX-License-Identifier: BSD-3-Clause\n */\n\n/**\n * Use this module if you want to create your own base class extending\n * {@link ReactiveElement}.\n * @packageDocumentation\n */\n\nimport {\n getCompatibleStyle,\n adoptStyles,\n CSSResultGroup,\n CSSResultOrNative,\n} from './css-tag.js';\nimport type {\n ReactiveController,\n ReactiveControllerHost,\n} from './reactive-controller.js';\n\n// In the Node build, this import will be injected by Rollup:\n// import {HTMLElement, customElements} from '@lit-labs/ssr-dom-shim';\n\nexport * from './css-tag.js';\nexport type {\n ReactiveController,\n ReactiveControllerHost,\n} from './reactive-controller.js';\n\n/**\n * Removes the `readonly` modifier from properties in the union K.\n *\n * This is a safer way to cast a value to a type with a mutable version of a\n * readonly field, than casting to an interface with the field re-declared\n * because it preserves the type of all the fields and warns on typos.\n */\ntype Mutable = Omit & {\n -readonly [P in keyof Pick]: P extends K ? 
T[P] : never;\n};\n\n// TODO (justinfagnani): Add `hasOwn` here when we ship ES2022\nconst {\n is,\n defineProperty,\n getOwnPropertyDescriptor,\n getOwnPropertyNames,\n getOwnPropertySymbols,\n getPrototypeOf,\n} = Object;\n\nconst NODE_MODE = false;\n\n// Lets a minifier replace globalThis references with a minified name\nconst global = globalThis;\n\nif (NODE_MODE) {\n global.customElements ??= customElements;\n}\n\nconst DEV_MODE = true;\n\nlet issueWarning: (code: string, warning: string) => void;\n\nconst trustedTypes = (global as unknown as {trustedTypes?: {emptyScript: ''}})\n .trustedTypes;\n\n// Temporary workaround for https://crbug.com/993268\n// Currently, any attribute starting with \"on\" is considered to be a\n// TrustedScript source. Such boolean attributes must be set to the equivalent\n// trusted emptyScript value.\nconst emptyStringForBooleanAttribute = trustedTypes\n ? (trustedTypes.emptyScript as unknown as '')\n : '';\n\nconst polyfillSupport = DEV_MODE\n ? global.reactiveElementPolyfillSupportDevMode\n : global.reactiveElementPolyfillSupport;\n\nif (DEV_MODE) {\n // Ensure warnings are issued only 1x, even if multiple versions of Lit\n // are loaded.\n global.litIssuedWarnings ??= new Set();\n\n /**\n * Issue a warning if we haven't already, based either on `code` or `warning`.\n * Warnings are disabled automatically only by `warning`; disabling via `code`\n * can be done by users.\n */\n issueWarning = (code: string, warning: string) => {\n warning += ` See https://lit.dev/msg/${code} for more information.`;\n if (\n !global.litIssuedWarnings!.has(warning) &&\n !global.litIssuedWarnings!.has(code)\n ) {\n console.warn(warning);\n global.litIssuedWarnings!.add(warning);\n }\n };\n\n queueMicrotask(() => {\n issueWarning(\n 'dev-mode',\n `Lit is in dev mode. Not recommended for production!`\n );\n\n // Issue polyfill support warning.\n if (global.ShadyDOM?.inUse && polyfillSupport === undefined) {\n issueWarning(\n 'polyfill-support-missing',\n `Shadow DOM is being polyfilled via \\`ShadyDOM\\` but ` +\n `the \\`polyfill-support\\` module has not been loaded.`\n );\n }\n });\n}\n\n/**\n * Contains types that are part of the unstable debug API.\n *\n * Everything in this API is not stable and may change or be removed in the future,\n * even on patch releases.\n */\n// eslint-disable-next-line @typescript-eslint/no-namespace\nexport namespace ReactiveUnstable {\n /**\n * When Lit is running in dev mode and `window.emitLitDebugLogEvents` is true,\n * we will emit 'lit-debug' events to window, with live details about the update and render\n * lifecycle. 
These can be useful for writing debug tooling and visualizations.\n *\n * Please be aware that running with window.emitLitDebugLogEvents has performance overhead,\n * making certain operations that are normally very cheap (like a no-op render) much slower,\n * because we must copy data and dispatch events.\n */\n // eslint-disable-next-line @typescript-eslint/no-namespace\n export namespace DebugLog {\n export type Entry = Update;\n export interface Update {\n kind: 'update';\n }\n }\n}\n\ninterface DebugLoggingWindow {\n // Even in dev mode, we generally don't want to emit these events, as that's\n // another level of cost, so only emit them when DEV_MODE is true _and_ when\n // window.emitLitDebugEvents is true.\n emitLitDebugLogEvents?: boolean;\n}\n\n/**\n * Useful for visualizing and logging insights into what the Lit template system is doing.\n *\n * Compiled out of prod mode builds.\n */\nconst debugLogEvent = DEV_MODE\n ? (event: ReactiveUnstable.DebugLog.Entry) => {\n const shouldEmit = (global as unknown as DebugLoggingWindow)\n .emitLitDebugLogEvents;\n if (!shouldEmit) {\n return;\n }\n global.dispatchEvent(\n new CustomEvent('lit-debug', {\n detail: event,\n })\n );\n }\n : undefined;\n\n/*\n * When using Closure Compiler, JSCompiler_renameProperty(property, object) is\n * replaced at compile time by the munged name for object[property]. We cannot\n * alias this function, so we have to use a small shim that has the same\n * behavior when not compiling.\n */\n/*@__INLINE__*/\nconst JSCompiler_renameProperty =

(\n prop: P,\n _obj: unknown\n): P => prop;\n\n/**\n * Converts property values to and from attribute values.\n */\nexport interface ComplexAttributeConverter {\n /**\n * Called to convert an attribute value to a property\n * value.\n */\n fromAttribute?(value: string | null, type?: TypeHint): Type;\n\n /**\n * Called to convert a property value to an attribute\n * value.\n *\n * It returns unknown instead of string, to be compatible with\n * https://github.com/WICG/trusted-types (and similar efforts).\n */\n toAttribute?(value: Type, type?: TypeHint): unknown;\n}\n\ntype AttributeConverter =\n | ComplexAttributeConverter\n | ((value: string | null, type?: TypeHint) => Type);\n\n/**\n * Defines options for a property accessor.\n */\nexport interface PropertyDeclaration {\n /**\n * When set to `true`, indicates the property is internal private state. The\n * property should not be set by users. When using TypeScript, this property\n * should be marked as `private` or `protected`, and it is also a common\n * practice to use a leading `_` in the name. The property is not added to\n * `observedAttributes`.\n */\n readonly state?: boolean;\n\n /**\n * Indicates how and whether the property becomes an observed attribute.\n * If the value is `false`, the property is not added to `observedAttributes`.\n * If true or absent, the lowercased property name is observed (e.g. `fooBar`\n * becomes `foobar`). If a string, the string value is observed (e.g\n * `attribute: 'foo-bar'`).\n */\n readonly attribute?: boolean | string;\n\n /**\n * Indicates the type of the property. This is used only as a hint for the\n * `converter` to determine how to convert the attribute\n * to/from a property.\n */\n readonly type?: TypeHint;\n\n /**\n * Indicates how to convert the attribute to/from a property. If this value\n * is a function, it is used to convert the attribute value a the property\n * value. If it's an object, it can have keys for `fromAttribute` and\n * `toAttribute`. If no `toAttribute` function is provided and\n * `reflect` is set to `true`, the property value is set directly to the\n * attribute. A default `converter` is used if none is provided; it supports\n * `Boolean`, `String`, `Number`, `Object`, and `Array`. Note,\n * when a property changes and the converter is used to update the attribute,\n * the property is never updated again as a result of the attribute changing,\n * and vice versa.\n */\n readonly converter?: AttributeConverter;\n\n /**\n * Indicates if the property should reflect to an attribute.\n * If `true`, when the property is set, the attribute is set using the\n * attribute name determined according to the rules for the `attribute`\n * property option and the value of the property converted using the rules\n * from the `converter` property option.\n */\n readonly reflect?: boolean;\n\n /**\n * A function that indicates if a property should be considered changed when\n * it is set. The function should take the `newValue` and `oldValue` and\n * return `true` if an update should be requested.\n */\n hasChanged?(value: Type, oldValue: Type): boolean;\n\n /**\n * Indicates whether an accessor will be created for this property. By\n * default, an accessor will be generated for this property that requests an\n * update when set. 
If this flag is `true`, no accessor will be created, and\n * it will be the user's responsibility to call\n * `this.requestUpdate(propertyName, oldValue)` to request an update when\n * the property changes.\n */\n readonly noAccessor?: boolean;\n\n /**\n * Whether this property is wrapping accessors. This is set by `@property`\n * to control the initial value change and reflection logic.\n *\n * @internal\n */\n wrapped?: boolean;\n\n /**\n * When `true`, uses the initial value of the property as the default value,\n * which changes how attributes are handled:\n * - The initial value does *not* reflect, even if the `reflect` option is `true`.\n * Subsequent changes to the property will reflect, even if they are equal to the\n * default value.\n * - When the attribute is removed, the property is set to the default value\n * - The initial value will not trigger an old value in the `changedProperties` map\n * argument to update lifecycle methods.\n *\n * When set, properties must be initialized, either with a field initializer, or an\n * assignment in the constructor. Not initializing the property may lead to\n * improper handling of subsequent property assignments.\n *\n * While this behavior is opt-in, most properties that reflect to attributes should\n * use `useDefault: true` so that their initial values do not reflect.\n */\n useDefault?: boolean;\n}\n\n/**\n * Map of properties to PropertyDeclaration options. For each property an\n * accessor is made, and the property is processed according to the\n * PropertyDeclaration options.\n */\nexport interface PropertyDeclarations {\n readonly [key: string]: PropertyDeclaration;\n}\n\ntype PropertyDeclarationMap = Map;\n\ntype AttributeMap = Map;\n\n/**\n * A Map of property keys to values.\n *\n * Takes an optional type parameter T, which when specified as a non-any,\n * non-unknown type, will make the Map more strongly-typed, associating the map\n * keys with their corresponding value type on T.\n *\n * Use `PropertyValues` when overriding ReactiveElement.update() and\n * other lifecycle methods in order to get stronger type-checking on keys\n * and values.\n */\n// This type is conditional so that if the parameter T is not specified, or\n// is `any`, the type will include `Map`. Since T is not\n// given in the uses of PropertyValues in this file, all uses here fallback to\n// meaning `Map`, but if a developer uses\n// `PropertyValues` (or any other value for T) they will get a\n// strongly-typed Map type.\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport type PropertyValues = T extends object\n ? PropertyValueMap\n : Map;\n\n/**\n * Do not use, instead prefer {@linkcode PropertyValues}.\n */\n// This type must be exported such that JavaScript generated by the Google\n// Closure Compiler can import a type reference.\nexport interface PropertyValueMap extends Map {\n get(k: K): T[K] | undefined;\n set(key: K, value: T[K]): this;\n has(k: K): boolean;\n delete(k: K): boolean;\n}\n\nexport const defaultConverter: ComplexAttributeConverter = {\n toAttribute(value: unknown, type?: unknown): unknown {\n switch (type) {\n case Boolean:\n value = value ? emptyStringForBooleanAttribute : null;\n break;\n case Object:\n case Array:\n // if the value is `null` or `undefined` pass this through\n // to allow removing/no change behavior.\n value = value == null ? 
/**
 * Map of properties to PropertyDeclaration options. For each property an
 * accessor is made, and the property is processed according to the
 * PropertyDeclaration options.
 */
export interface PropertyDeclarations {
  readonly [key: string]: PropertyDeclaration;
}

type PropertyDeclarationMap = Map<PropertyKey, PropertyDeclaration>;

type AttributeMap = Map<string, PropertyKey>;

/**
 * A Map of property keys to values.
 *
 * Takes an optional type parameter T, which when specified as a non-any,
 * non-unknown type, will make the Map more strongly-typed, associating the map
 * keys with their corresponding value type on T.
 *
 * Use `PropertyValues<this>` when overriding ReactiveElement.update() and
 * other lifecycle methods in order to get stronger type-checking on keys
 * and values.
 */
// This type is conditional so that if the parameter T is not specified, or
// is `any`, the type will include `Map<PropertyKey, unknown>`. Since T is not
// given in the uses of PropertyValues in this file, all uses here fallback to
// meaning `Map<PropertyKey, unknown>`, but if a developer uses
// `PropertyValues<MyElement>` (or any other value for T) they will get a
// strongly-typed Map type.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type PropertyValues<T = any> = T extends object
  ? PropertyValueMap<T>
  : Map<PropertyKey, unknown>;

/**
 * Do not use, instead prefer {@linkcode PropertyValues}.
 */
// This type must be exported such that JavaScript generated by the Google
// Closure Compiler can import a type reference.
export interface PropertyValueMap<T> extends Map<PropertyKey, unknown> {
  get<K extends keyof T>(k: K): T[K] | undefined;
  set<K extends keyof T>(key: K, value: T[K]): this;
  has<K extends keyof T>(k: K): boolean;
  delete<K extends keyof T>(k: K): boolean;
}
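/*
 * Example (illustrative sketch; the `NameTag` element and its `name` property
 * are hypothetical): passing the element type to `PropertyValues` strongly
 * types the keys and values seen in update lifecycle overrides.
 *
 * ```ts
 * class NameTag extends ReactiveElement {
 *   static properties = {name: {type: String}};
 *   declare name: string;
 *
 *   protected override willUpdate(changed: PropertyValues<this>) {
 *     if (changed.has('name')) {
 *       // `changed.get('name')` is typed as `string | undefined`.
 *     }
 *   }
 * }
 * ```
 */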
export const defaultConverter: ComplexAttributeConverter = {
  toAttribute(value: unknown, type?: unknown): unknown {
    switch (type) {
      case Boolean:
        value = value ? emptyStringForBooleanAttribute : null;
        break;
      case Object:
      case Array:
        // if the value is `null` or `undefined` pass this through
        // to allow removing/no change behavior.
        value = value == null ? value : JSON.stringify(value);
        break;
    }
    return value;
  },

  fromAttribute(value: string | null, type?: unknown) {
    let fromValue: unknown = value;
    switch (type) {
      case Boolean:
        fromValue = value !== null;
        break;
      case Number:
        fromValue = value === null ? null : Number(value);
        break;
      case Object:
      case Array:
        // Do *not* generate exception when invalid JSON is set as elements
        // don't normally complain on being mis-configured.
        // TODO(sorvell): Do generate exception in *dev mode*.
        try {
          // Assert to adhere to Bazel's "must type assert JSON parse" rule.
          fromValue = JSON.parse(value!) as unknown;
        } catch (e) {
          fromValue = null;
        }
        break;
    }
    return fromValue;
  },
};

export interface HasChanged {
  (value: unknown, old: unknown): boolean;
}

/**
 * Change function that returns true if `value` is different from `oldValue`.
 * This method is used as the default for a property's `hasChanged` function.
 */
export const notEqual: HasChanged = (value: unknown, old: unknown): boolean =>
  !is(value, old);
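/*
 * Example (illustrative sketch; the `Gauge` element and its `level` property
 * are hypothetical): a custom `hasChanged` can relax the default `Object.is`
 * comparison so that insignificant changes do not request an update.
 *
 * ```ts
 * class Gauge extends ReactiveElement {
 *   static properties = {
 *     level: {
 *       type: Number,
 *       // Only request an update when the rounded value changes.
 *       hasChanged: (value: number, oldValue: number) =>
 *         Math.round(value) !== Math.round(oldValue),
 *     },
 *   };
 *   declare level: number;
 * }
 * ```
 */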
const defaultPropertyDeclaration: PropertyDeclaration = {
  attribute: true,
  type: String,
  converter: defaultConverter,
  reflect: false,
  useDefault: false,
  hasChanged: notEqual,
};

/**
 * A string representing one of the supported dev mode warning categories.
 */
export type WarningKind =
  | 'change-in-update'
  | 'migration'
  | 'async-perform-update';

export type Initializer = (element: ReactiveElement) => void;

// Temporary, until google3 is on TypeScript 5.2
declare global {
  interface SymbolConstructor {
    readonly metadata: unique symbol;
  }
}

// Ensure metadata is enabled. TypeScript does not polyfill
// Symbol.metadata, so we must ensure that it exists.
(Symbol as {metadata: symbol}).metadata ??= Symbol('metadata');

declare global {
  // This is public global API, do not change!
  // eslint-disable-next-line no-var
  var litPropertyMetadata: WeakMap<
    object,
    Map<PropertyKey, PropertyDeclaration>
  >;
}

// Map from a class's metadata object to property options.
// Note that we must use nullish-coalescing assignment so that we only use one
// map even if we load multiple versions of this module.
global.litPropertyMetadata ??= new WeakMap<
  object,
  Map<PropertyKey, PropertyDeclaration>
>();

/**
 * Base element class which manages element properties and attributes. When
 * properties change, the `update` method is asynchronously called. This method
 * should be supplied by subclasses to render updates as desired.
 * @noInheritDoc
 */
export abstract class ReactiveElement
  // In the Node build, this `extends` clause will be substituted with
  // `(globalThis.HTMLElement ?? HTMLElement)`.
  //
  // This way, we will first prefer any global `HTMLElement` polyfill that the
  // user has assigned, and then fall back to the `HTMLElement` shim which has
  // been imported (see note at the top of this file about how this import is
  // generated by Rollup). Note that the `HTMLElement` variable has been
  // shadowed by this import, so it no longer refers to the global.
  extends HTMLElement
  implements ReactiveControllerHost
{
  // Note: these are patched in only in DEV_MODE.
  /**
   * Read or set all the enabled warning categories for this class.
   *
   * This property is only used in development builds.
   *
   * @nocollapse
   * @category dev-mode
   */
  static enabledWarnings?: WarningKind[];

  /**
   * Enable the given warning category for this class.
   *
   * This method only exists in development builds, so it should be accessed
   * with a guard like:
   *
   * ```ts
   * // Enable for all ReactiveElement subclasses
   * ReactiveElement.enableWarning?.('migration');
   *
   * // Enable for only MyElement and subclasses
   * MyElement.enableWarning?.('migration');
   * ```
   *
   * @nocollapse
   * @category dev-mode
   */
  static enableWarning?: (warningKind: WarningKind) => void;

  /**
   * Disable the given warning category for this class.
   *
   * This method only exists in development builds, so it should be accessed
   * with a guard like:
   *
   * ```ts
   * // Disable for all ReactiveElement subclasses
   * ReactiveElement.disableWarning?.('migration');
   *
   * // Disable for only MyElement and subclasses
   * MyElement.disableWarning?.('migration');
   * ```
   *
   * @nocollapse
   * @category dev-mode
   */
  static disableWarning?: (warningKind: WarningKind) => void;

  /**
   * Adds an initializer function to the class that is called during instance
   * construction.
   *
   * This is useful for code that runs against a `ReactiveElement`
   * subclass, such as a decorator, that needs to do work for each
   * instance, such as setting up a `ReactiveController`.
   *
   * ```ts
   * const myDecorator = (target: typeof ReactiveElement, key: string) => {
   *   target.addInitializer((instance: ReactiveElement) => {
   *     // This is run during construction of the element
   *     new MyController(instance);
   *   });
   * }
   * ```
   *
   * Decorating a field will then cause each instance to run an initializer
   * that adds a controller:
   *
   * ```ts
   * class MyElement extends LitElement {
   *   @myDecorator foo;
   * }
   * ```
   *
   * Initializers are stored per-constructor. Adding an initializer to a
   * subclass does not add it to a superclass. Since initializers are run in
   * constructors, initializers will run in order of the class hierarchy,
   * starting with superclasses and progressing to the instance's class.
   *
   * @nocollapse
   */
  static addInitializer(initializer: Initializer) {
    this.__prepare();
    (this._initializers ??= []).push(initializer);
  }

  static _initializers?: Initializer[];

  /*
   * Due to closure compiler ES6 compilation bugs, @nocollapse is required on
   * all static methods and properties with initializers. Reference:
   * - https://github.com/google/closure-compiler/issues/1776
   */

  /**
   * Maps attribute names to properties; for example `foobar` attribute to
   * `fooBar` property. Created lazily on user subclasses when finalizing the
   * class.
   * @nocollapse
   */
  private static __attributeToPropertyMap: AttributeMap;

  /**
   * Marks class as having been finalized, which includes creating properties
   * from `static properties`, but does *not* include all properties created
   * from decorators.
   * @nocollapse
   */
  protected static finalized: true | undefined;

  /**
   * Memoized list of all element properties, including any superclass
   * properties. Created lazily on user subclasses when finalizing the class.
   *
   * @nocollapse
   * @category properties
   */
  static elementProperties: PropertyDeclarationMap;

  /**
   * User-supplied object that maps property names to `PropertyDeclaration`
   * objects containing options for configuring reactive properties. When
   * a reactive property is set the element will update and render.
   *
   * By default properties are public fields, and as such, they should be
   * considered as primarily settable by element users, either via attribute or
   * the property itself.
   *
   * Generally, properties that are changed by the element should be private or
   * protected fields and should use the `state: true` option. Properties
   * marked as `state` do not reflect from the corresponding attribute.
   *
   * However, sometimes element code does need to set a public property. This
   * should typically only be done in response to user interaction, and an
   * event should be fired informing the user; for example, a checkbox sets its
   * `checked` property when clicked and fires a `changed` event. Mutating
   * public properties should typically not be done for non-primitive (object
   * or array) properties. In other cases when an element needs to manage
   * state, a private property set with the `state: true` option should be
   * used. When needed, state properties can be initialized via public
   * properties to facilitate complex interactions.
   * @nocollapse
   * @category properties
   */
  static properties: PropertyDeclarations;

  /**
   * Memoized list of all element styles.
   * Created lazily on user subclasses when finalizing the class.
   * @nocollapse
   * @category styles
   */
  static elementStyles: Array<CSSResultOrNative> = [];

  /**
   * Array of styles to apply to the element. The styles should be defined
   * using the {@linkcode css} tag function, via constructible stylesheets, or
   * imported from native CSS module scripts.
   *
   * Note on Content Security Policy:
   *
   * Element styles are implemented with `