Cypherpunks.ru repositories - nncp.git/commitdiff
Ability to send directories, creating pax archive
author Sergey Matveev <stargrave@stargrave.org>
Fri, 22 Nov 2019 19:40:40 +0000 (22:40 +0300)
committer Sergey Matveev <stargrave@stargrave.org>
Sun, 24 Nov 2019 15:08:55 +0000 (18:08 +0300)
VERSION
doc/bundles.texi
doc/cmds.texi
doc/news.ru.texi
doc/news.texi
src/tx.go

diff --git a/VERSION b/VERSION
index 0062ac971805f7b700058db4bb0f5c5b771dda76..831446cbd27a6de403344b21c9fa93a25357f43d 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-5.0.0
+5.1.0
index 8330cdf2565534bd480d28abf8e70ddb4b444799..21b31046a1ae1c7d1d0aec48f6f5f76757b86c4c 100644
--- a/doc/bundles.texi
+++ b/doc/bundles.texi
@@ -48,8 +48,8 @@ can contain mix of various recipients.
 
 @end itemize
 
-Technically bundle is valid POSIX.1-2001 (pax)
-@url{http://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html, tar archive},
+Technically a bundle is a valid POSIX.1-2001
+@url{https://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html#tag_20_92_13_01, pax archive},
 with directory/files hierarchy identical to that is used in
 @ref{nncp-xfer}: @file{NNCP/RECIPIENT/SENDER/PACKET}. So bundle can also
 be created by manual tar-ing of @command{nncp-xfer} resulting directory.
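
For illustration only, here is a minimal Go sketch of that manual approach: it walks a hypothetical nncp-xfer output directory ("out" is an assumed path, not from this commit) and writes its NNCP/RECIPIENT/SENDER/PACKET files as a pax stream to stdout.

// Sketch only: pack an nncp-xfer output directory into a bundle-like
// pax stream on stdout. The "out" path is an assumption made for the
// example; the NNCP/RECIPIENT/SENDER/PACKET layout comes from the
// documentation above.
package main

import (
	"archive/tar"
	"io"
	"log"
	"os"
	"path/filepath"
)

func main() {
	root := "out" // hypothetical nncp-xfer destination directory
	tarWr := tar.NewWriter(os.Stdout)
	err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
		if err != nil || !info.Mode().IsRegular() {
			return err // skip directories, propagate walk errors
		}
		rel, err := filepath.Rel(root, path)
		if err != nil {
			return err
		}
		hdr := &tar.Header{
			Name:    filepath.ToSlash(rel), // NNCP/RECIPIENT/SENDER/PACKET
			Mode:    0666,
			Size:    info.Size(),
			ModTime: info.ModTime(),
			Format:  tar.FormatPAX,
		}
		if err = tarWr.WriteHeader(hdr); err != nil {
			return err
		}
		fd, err := os.Open(path)
		if err != nil {
			return err
		}
		defer fd.Close()
		_, err = io.Copy(tarWr, fd)
		return err
	})
	if err != nil {
		log.Fatal(err)
	}
	if err = tarWr.Close(); err != nil {
		log.Fatal(err)
	}
}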
index 4247054b10a4c0b73f21fac3e583d52c05b5c125..7097e821648078f449bb8390606e7afb6006830c 100644
--- a/doc/cmds.texi
+++ b/doc/cmds.texi
@@ -315,6 +315,15 @@ variable. Encryption is performed in AEAD mode with
 algorithms. Data is splitted on 128 KiB blocks. Each block is encrypted
 with increasing nonce counter.
 
+If @file{SRC} points to a directory, then a
+@url{https://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html#tag_20_92_13_01, pax archive}
+is created on the fly from its contents, with @file{.tar} appended to
+the destination filename. It @strong{won't} contain any entity
+metainformation except names and modification times. UID/GID are set
+to zero. Directories get 777 permissions and files get 666, to be
+friendly with @command{umask}. Also each entity carries a comment like
+@verb{|Autogenerated by NNCP version X.Y.Z built with goXXX|}.
+
 If @option{-chunked} is specified, then source file will be split
 @ref{Chunked, on chunks}. @option{INT} is the desired chunk size in
 KiBs. This mode is more CPU hungry. Pay attention that chunk is saved in
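
As a hedged illustration of the header policy described above (zero UID/GID, 0777 directories, 0666 files, only names and modification times preserved, plus a PAX comment record), the following standalone Go sketch writes one directory and one file entry with archive/tar. The output path, entry names and comment string are invented for the example.

// Sketch only: the pax header policy described above, written with
// archive/tar. Output filename, entry names and the comment string are
// invented for this example.
package main

import (
	"archive/tar"
	"log"
	"os"
	"time"
)

func main() {
	out, err := os.Create("example.tar") // hypothetical output path
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()
	tarWr := tar.NewWriter(out)
	defer tarWr.Close()

	comment := map[string]string{
		"comment": "Autogenerated by NNCP version X.Y.Z built with goX.Y",
	}

	// Directory entry: 0777 permissions, UID/GID left at zero,
	// only the name and the modification time are preserved.
	if err = tarWr.WriteHeader(&tar.Header{
		Typeflag:   tar.TypeDir,
		Name:       "dir/",
		Mode:       0777,
		ModTime:    time.Now(),
		PAXRecords: comment,
		Format:     tar.FormatPAX,
	}); err != nil {
		log.Fatal(err)
	}

	// Regular file entry: 0666 permissions, body follows the header.
	body := []byte("hello\n")
	if err = tarWr.WriteHeader(&tar.Header{
		Typeflag:   tar.TypeReg,
		Name:       "dir/hello.txt",
		Mode:       0666,
		Size:       int64(len(body)),
		ModTime:    time.Now(),
		PAXRecords: comment,
		Format:     tar.FormatPAX,
	}); err != nil {
		log.Fatal(err)
	}
	if _, err = tarWr.Write(body); err != nil {
		log.Fatal(err)
	}
}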
index d1fe6d398db1f0b98ad523cd80cd407280cb2d35..e85b0b8e2a44e04ac5c43950f5a1843ffd0c6789 100644
--- a/doc/news.ru.texi
+++ b/doc/news.ru.texi
@@ -1,6 +1,16 @@
 @node Новости
 @section Новости
 
+@node Релиз 5.1.0
+@subsection Релиз 5.1.0
+@itemize
+
+@item
+@command{nncp-file} может отправлять директории, автоматически на лету
+создавая pax архив.
+
+@end itemize
+
 @node Релиз 5.0.0
 @subsection Релиз 5.0.0
 @itemize
index b57c5ea567e3d07c1724ce98c58b2613c39a17d1..e29cf17e064cf22573967724de7abcb01d48a70b 100644
--- a/doc/news.texi
+++ b/doc/news.texi
@@ -3,6 +3,16 @@
 
 See also this page @ref{Новости, on russian}.
 
+@node Release 5.1.0
+@section Release 5.1.0
+@itemize
+
+@item
+@command{nncp-file} can send directories, automatically creating a pax
+archive on the fly.
+
+@end itemize
+
 @node Release 5.0.0
 @section Release 5.0.0
 @itemize
index 022789966197f4d799e72709d83adc993af411d1..88d4e9af4b4dfd9fe3f570fbca88b6d65296a951 100644
--- a/src/tx.go
+++ b/src/tx.go
@@ -18,6 +18,7 @@ along with this program.  If not, see <http://www.gnu.org/licenses/>.
 package nncp
 
 import (
+       "archive/tar"
        "bufio"
        "bytes"
        "crypto/rand"
@@ -29,6 +30,7 @@ import (
        "path/filepath"
        "strconv"
        "strings"
+       "time"
 
        xdr "github.com/davecgh/go-xdr/xdr2"
        "github.com/klauspost/compress/zstd"
@@ -36,6 +38,11 @@ import (
        "golang.org/x/crypto/chacha20poly1305"
 )
 
+const (
+       TarBlockSize = 512
+       TarExt       = ".tar"
+)
+
 func (ctx *Ctx) Tx(
        node *Node,
        pkt *Pkt,
@@ -109,77 +116,190 @@ func (ctx *Ctx) Tx(
        return lastNode, err
 }
 
-func prepareTxFile(srcPath string) (io.Reader, *os.File, int64, error) {
-       var reader io.Reader
-       var src *os.File
-       var fileSize int64
-       var err error
+type DummyCloser struct{}
+
+func (dc DummyCloser) Close() error { return nil }
+
+func prepareTxFile(srcPath string) (reader io.Reader, closer io.Closer, fileSize int64, archived bool, rerr error) {
        if srcPath == "-" {
-               src, err = ioutil.TempFile("", "nncp-file")
+               // Read content from stdin, saving to temporary file, encrypting
+               // on the fly
+               src, err := ioutil.TempFile("", "nncp-file")
                if err != nil {
-                       return nil, nil, 0, err
+                       rerr = err
+                       return
                }
                os.Remove(src.Name())
                tmpW := bufio.NewWriter(src)
                tmpKey := make([]byte, chacha20poly1305.KeySize)
-               if _, err = rand.Read(tmpKey[:]); err != nil {
-                       return nil, nil, 0, err
+               if _, rerr = rand.Read(tmpKey[:]); rerr != nil {
+                       return
                }
                aead, err := chacha20poly1305.New(tmpKey)
                if err != nil {
-                       return nil, nil, 0, err
+                       rerr = err
+                       return
                }
                nonce := make([]byte, aead.NonceSize())
                written, err := aeadProcess(aead, nonce, true, bufio.NewReader(os.Stdin), tmpW)
                if err != nil {
-                       return nil, nil, 0, err
+                       rerr = err
+                       return
                }
                fileSize = int64(written)
                if err = tmpW.Flush(); err != nil {
-                       return nil, nil, 0, err
+                       return
                }
                src.Seek(0, io.SeekStart)
                r, w := io.Pipe()
                go func() {
                        if _, err := aeadProcess(aead, nonce, false, bufio.NewReader(src), w); err != nil {
-                               panic(err)
+                               w.CloseWithError(err)
                        }
                }()
                reader = r
-       } else {
-               src, err = os.Open(srcPath)
-               if err != nil {
-                       return nil, nil, 0, err
-               }
-               srcStat, err := src.Stat()
+               closer = src
+               return
+       }
+
+       srcStat, err := os.Stat(srcPath)
+       if err != nil {
+               rerr = err
+               return
+       }
+       mode := srcStat.Mode()
+
+       if mode.IsRegular() {
+               // It is regular file, just send it
+               src, err := os.Open(srcPath)
                if err != nil {
-                       return nil, nil, 0, err
+                       rerr = err
+                       return
                }
                fileSize = srcStat.Size()
                reader = bufio.NewReader(src)
+               closer = src
+               return
+       }
+
+       if !mode.IsDir() {
+               rerr = errors.New("unsupported file type")
+               return
+       }
+
+       // It is directory, create PAX archive with its contents
+       archived = true
+       basePath := filepath.Base(srcPath)
+       rootPath, err := filepath.Abs(srcPath)
+       if err != nil {
+               rerr = err
+               return
+       }
+       type einfo struct {
+               path    string
+               modTime time.Time
+               size    int64
+       }
+       dirs := make([]einfo, 0, 1<<10)
+       files := make([]einfo, 0, 1<<10)
+       rerr = filepath.Walk(rootPath, func(path string, info os.FileInfo, err error) error {
+               if err != nil {
+                       return err
+               }
+               if info.IsDir() {
+                       // directory header, PAX record header+contents
+                       fileSize += TarBlockSize + 2*TarBlockSize
+                       dirs = append(dirs, einfo{path: path, modTime: info.ModTime()})
+               } else {
+                       // file header, PAX record header+contents, file content
+                       fileSize += TarBlockSize + 2*TarBlockSize + info.Size()
+                       if n := info.Size() % TarBlockSize; n != 0 {
+                               fileSize += TarBlockSize - n // padding
+                       }
+                       files = append(files, einfo{
+                               path:    path,
+                               modTime: info.ModTime(),
+                               size:    info.Size(),
+                       })
+               }
+               return nil
+       })
+       if rerr != nil {
+               return
        }
-       return reader, src, fileSize, nil
+
+       r, w := io.Pipe()
+       reader = r
+       closer = DummyCloser{}
+       fileSize += 2 * TarBlockSize // termination block
+
+       go func() {
+               tarWr := tar.NewWriter(w)
+               hdr := tar.Header{
+                       Typeflag: tar.TypeDir,
+                       Mode:     0777,
+                       PAXRecords: map[string]string{
+                               "comment": "Autogenerated by " + VersionGet(),
+                       },
+                       Format: tar.FormatPAX,
+               }
+               for _, e := range dirs {
+                       hdr.Name = basePath + e.path[len(rootPath):]
+                       hdr.ModTime = e.modTime
+                       if err = tarWr.WriteHeader(&hdr); err != nil {
+                               w.CloseWithError(err)
+                       }
+               }
+               hdr.Typeflag = tar.TypeReg
+               hdr.Mode = 0666
+               for _, e := range files {
+                       hdr.Name = basePath + e.path[len(rootPath):]
+                       hdr.ModTime = e.modTime
+                       hdr.Size = e.size
+                       if err = tarWr.WriteHeader(&hdr); err != nil {
+                               w.CloseWithError(err)
+                       }
+                       fd, err := os.Open(e.path)
+                       if err != nil {
+                               w.CloseWithError(err)
+                       }
+                       _, err = io.Copy(tarWr, bufio.NewReader(fd))
+                       if err != nil {
+                               w.CloseWithError(err)
+                       }
+                       fd.Close()
+               }
+               tarWr.Close()
+               w.Close()
+       }()
+       return
 }
 
 func (ctx *Ctx) TxFile(node *Node, nice uint8, srcPath, dstPath string, minSize int64) error {
+       dstPathSpecified := false
        if dstPath == "" {
                if srcPath == "-" {
                        return errors.New("Must provide destination filename")
                }
                dstPath = filepath.Base(srcPath)
+       } else {
+               dstPathSpecified = true
        }
        dstPath = filepath.Clean(dstPath)
        if filepath.IsAbs(dstPath) {
                return errors.New("Relative destination path required")
        }
-       pkt, err := NewPkt(PktTypeFile, nice, []byte(dstPath))
+       reader, closer, fileSize, archived, err := prepareTxFile(srcPath)
+       if closer != nil {
+               defer closer.Close()
+       }
        if err != nil {
                return err
        }
-       reader, src, fileSize, err := prepareTxFile(srcPath)
-       if src != nil {
-               defer src.Close()
+       if archived && !dstPathSpecified {
+               dstPath += TarExt
        }
+       pkt, err := NewPkt(PktTypeFile, nice, []byte(dstPath))
        if err != nil {
                return err
        }
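
The size pre-calculation in the Walk callback above assumes each entry costs one 512-byte tar header plus two 512-byte blocks for its PAX extended header, file data rounded up to a 512-byte boundary, and a 1024-byte termination at the end of the archive. The following standalone sketch recomputes that estimate for two synthetic entries and compares it with what archive/tar actually emits; entry names, file contents and the comment text are illustrative only, not taken from this commit.

// Sketch only: the per-entry size arithmetic from the Walk callback
// above, checked against what archive/tar really writes.
package main

import (
	"archive/tar"
	"bytes"
	"fmt"
	"log"
	"time"
)

const TarBlockSize = 512

func main() {
	body := []byte("hello world\n")
	comment := map[string]string{"comment": "Autogenerated by NNCP"}

	// Estimate: each entry costs header + PAX record header+contents;
	// file data is padded to a block; two zero blocks terminate the archive.
	estimate := int64(0)
	estimate += TarBlockSize + 2*TarBlockSize                    // directory entry
	estimate += TarBlockSize + 2*TarBlockSize + int64(len(body)) // file entry
	if n := int64(len(body)) % TarBlockSize; n != 0 {
		estimate += TarBlockSize - n // padding of file contents
	}
	estimate += 2 * TarBlockSize // termination blocks

	// Actual: write the same two entries and count the bytes.
	var buf bytes.Buffer
	tarWr := tar.NewWriter(&buf)
	hdr := tar.Header{
		Typeflag:   tar.TypeDir,
		Name:       "dir/",
		Mode:       0777,
		ModTime:    time.Now(),
		PAXRecords: comment,
		Format:     tar.FormatPAX,
	}
	if err := tarWr.WriteHeader(&hdr); err != nil {
		log.Fatal(err)
	}
	hdr.Typeflag = tar.TypeReg
	hdr.Name = "dir/hello.txt"
	hdr.Mode = 0666
	hdr.Size = int64(len(body))
	if err := tarWr.WriteHeader(&hdr); err != nil {
		log.Fatal(err)
	}
	if _, err := tarWr.Write(body); err != nil {
		log.Fatal(err)
	}
	if err := tarWr.Close(); err != nil {
		log.Fatal(err)
	}
	fmt.Println("estimated:", estimate, "actual:", buf.Len())
}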
@@ -208,23 +328,29 @@ func (ctx *Ctx) TxFileChunked(
        minSize int64,
        chunkSize int64,
 ) error {
+       dstPathSpecified := false
        if dstPath == "" {
                if srcPath == "-" {
                        return errors.New("Must provide destination filename")
                }
                dstPath = filepath.Base(srcPath)
+       } else {
+               dstPathSpecified = true
        }
        dstPath = filepath.Clean(dstPath)
        if filepath.IsAbs(dstPath) {
                return errors.New("Relative destination path required")
        }
-       reader, src, fileSize, err := prepareTxFile(srcPath)
-       if src != nil {
-               defer src.Close()
+       reader, closer, fileSize, archived, err := prepareTxFile(srcPath)
+       if closer != nil {
+               defer closer.Close()
        }
        if err != nil {
                return err
        }
+       if archived && !dstPathSpecified {
+               dstPath += TarExt
+       }
 
        if fileSize <= chunkSize {
                pkt, err := NewPkt(PktTypeFile, nice, []byte(dstPath))
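
Finally, a reduced sketch of the streaming pattern used in prepareTxFile for directories: a goroutine writes the pax archive into an io.Pipe, so the caller consumes it as an ordinary io.Reader while the archive is being generated. Directory layout, header fields and error handling here are simplified assumptions, not the exact code from the diff above.

// Sketch only: stream a directory as a pax archive through io.Pipe,
// so the consumer sees a plain io.Reader. Paths are illustrative.
package main

import (
	"archive/tar"
	"bufio"
	"io"
	"log"
	"os"
	"path/filepath"
)

// dirReader returns a reader yielding a pax archive of root's regular files.
func dirReader(root string) io.Reader {
	r, w := io.Pipe()
	go func() {
		tarWr := tar.NewWriter(w)
		err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
			if err != nil || !info.Mode().IsRegular() {
				return err
			}
			rel, err := filepath.Rel(root, path)
			if err != nil {
				return err
			}
			if err = tarWr.WriteHeader(&tar.Header{
				Name:    filepath.ToSlash(rel),
				Mode:    0666,
				Size:    info.Size(),
				ModTime: info.ModTime(),
				Format:  tar.FormatPAX,
			}); err != nil {
				return err
			}
			fd, err := os.Open(path)
			if err != nil {
				return err
			}
			defer fd.Close()
			_, err = io.Copy(tarWr, bufio.NewReader(fd))
			return err
		})
		if err == nil {
			err = tarWr.Close()
		}
		w.CloseWithError(err) // a nil error closes the pipe normally
	}()
	return r
}

func main() {
	// Consume the stream like any other reader, e.g. count its bytes.
	n, err := io.Copy(io.Discard, dirReader("."))
	if err != nil {
		log.Fatal(err)
	}
	log.Println("archive size:", n)
}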