2016-08-26 18:29:05 +08:00
|
|
|
package metainfo
|
|
|
|
|
|
|
|
import (
|
|
|
|
"errors"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
|
|
|
"os"
|
|
|
|
"path/filepath"
|
|
|
|
"strings"
|
|
|
|
|
2022-11-02 20:03:25 +08:00
|
|
|
"github.com/anacrolix/missinggo/v2/slices"
|
2016-08-26 18:29:05 +08:00
|
|
|
)
|
|
|
|
|
|
|
|
// The info dictionary. This is the bencoded dict whose SHA-1 is the torrent's
// infohash, so field tags and presence (omitempty) are part of the wire format
// and must not change.
type Info struct {
	PieceLength int64  `bencode:"piece length"` // BEP3. Bytes per piece.
	Pieces      []byte `bencode:"pieces"`       // BEP3. Concatenated 20-byte SHA-1 piece hashes.
	Name        string `bencode:"name"`         // BEP3. Advisory; see BestName and NoName.
	NameUtf8    string `bencode:"name.utf-8,omitempty"`
	Length      int64  `bencode:"length,omitempty"`  // BEP3, mutually exclusive with Files
	Private     *bool  `bencode:"private,omitempty"` // BEP27
	// TODO: Document this field.
	Source string     `bencode:"source,omitempty"`
	Files  []FileInfo `bencode:"files,omitempty"` // BEP3, mutually exclusive with Length
}
|
|
|
|
|
2021-09-02 08:21:46 +08:00
|
|
|
// The Info.Name field is "advisory". For multi-file torrents it's usually a suggested directory
// name. There are situations where we don't want a directory (like using the contents of a torrent
// as the immediate contents of a directory), or the name is invalid. Transmission will inject the
// name of the torrent file if it doesn't like the name, resulting in a different infohash
// (https://github.com/transmission/transmission/issues/1775). To work around these situations, we
// will use a sentinel name for compatibility with Transmission and to signal to our own client that
// we intended to have no directory name. By exposing it in the API we can check for references to
// this behaviour within this implementation.
const NoName = "-"
|
|
|
|
|
|
|
|
// This is a helper that sets Files and Pieces from a root path and its children.
|
2023-04-27 15:57:23 +08:00
|
|
|
// fullfill the info struct
|
2016-08-26 18:29:05 +08:00
|
|
|
func (info *Info) BuildFromFilePath(root string) (err error) {
|
2023-04-27 15:57:23 +08:00
|
|
|
// name
|
2021-09-01 14:17:23 +08:00
|
|
|
info.Name = func() string {
|
|
|
|
b := filepath.Base(root)
|
|
|
|
switch b {
|
|
|
|
case ".", "..", string(filepath.Separator):
|
2021-09-02 08:21:46 +08:00
|
|
|
return NoName
|
2021-09-01 14:17:23 +08:00
|
|
|
default:
|
|
|
|
return b
|
|
|
|
}
|
|
|
|
}()
|
2023-04-27 15:57:23 +08:00
|
|
|
|
|
|
|
// files
|
2016-08-26 18:29:05 +08:00
|
|
|
info.Files = nil
|
|
|
|
err = filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
if fi.IsDir() {
|
|
|
|
// Directories are implicit in torrent files.
|
|
|
|
return nil
|
|
|
|
} else if path == root {
|
|
|
|
// The root is a file.
|
|
|
|
info.Length = fi.Size()
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
relPath, err := filepath.Rel(root, path)
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("error getting relative path: %s", err)
|
|
|
|
}
|
|
|
|
info.Files = append(info.Files, FileInfo{
|
|
|
|
Path: strings.Split(relPath, string(filepath.Separator)),
|
|
|
|
Length: fi.Size(),
|
|
|
|
})
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
slices.Sort(info.Files, func(l, r FileInfo) bool {
|
|
|
|
return strings.Join(l.Path, "/") < strings.Join(r.Path, "/")
|
|
|
|
})
|
2023-04-27 15:57:23 +08:00
|
|
|
|
|
|
|
// piece length
|
2022-06-27 17:25:38 +08:00
|
|
|
if info.PieceLength == 0 {
|
|
|
|
info.PieceLength = ChoosePieceLength(info.TotalLength())
|
|
|
|
}
|
2023-04-27 15:57:23 +08:00
|
|
|
|
|
|
|
// pieces
|
2016-08-26 18:29:05 +08:00
|
|
|
err = info.GeneratePieces(func(fi FileInfo) (io.ReadCloser, error) {
|
|
|
|
return os.Open(filepath.Join(root, strings.Join(fi.Path, string(filepath.Separator))))
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
err = fmt.Errorf("error generating pieces: %s", err)
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-05-04 16:38:04 +08:00
|
|
|
// 根据导入的模型确定name
|
|
|
|
// 需要设置info.Length为byte数组的长度, 也是整个文件(内存)的长度
|
|
|
|
func (info *Info) BuildFromMemory(byteData []byte, name string) (err error) {
|
|
|
|
// length
|
|
|
|
length := info.Length
|
|
|
|
if length == 0 {
|
|
|
|
info.Length = int64(len(byteData))
|
|
|
|
length = info.Length
|
|
|
|
}
|
|
|
|
|
|
|
|
// name
|
|
|
|
info.Name = name
|
|
|
|
|
|
|
|
// files
|
|
|
|
// regard the memory block as single file
|
|
|
|
info.Files = append(info.Files, FileInfo{
|
|
|
|
Path: []string{name},
|
|
|
|
Length: length,
|
|
|
|
})
|
|
|
|
|
|
|
|
// piece length
|
|
|
|
if info.PieceLength == 0 {
|
|
|
|
info.PieceLength = ChoosePieceLength(info.TotalLength())
|
|
|
|
}
|
|
|
|
|
|
|
|
// pieces
|
|
|
|
err = info.GeneratePiecesFromMemory(byteData)
|
|
|
|
if err != nil {
|
|
|
|
err = fmt.Errorf("error generating pieces: %s", err)
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2017-12-29 09:16:05 +08:00
|
|
|
// Concatenates all the files in the torrent into w. open is a function that
|
|
|
|
// gets at the contents of the given file.
|
2016-08-26 18:29:05 +08:00
|
|
|
func (info *Info) writeFiles(w io.Writer, open func(fi FileInfo) (io.ReadCloser, error)) error {
|
|
|
|
for _, fi := range info.UpvertedFiles() {
|
|
|
|
r, err := open(fi)
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("error opening %v: %s", fi, err)
|
|
|
|
}
|
|
|
|
wn, err := io.CopyN(w, r, fi.Length)
|
|
|
|
r.Close()
|
2017-12-29 09:18:55 +08:00
|
|
|
if wn != fi.Length {
|
|
|
|
return fmt.Errorf("error copying %v: %s", fi, err)
|
2016-08-26 18:29:05 +08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2017-12-29 09:16:05 +08:00
|
|
|
// Sets Pieces (the block of piece hashes in the Info) by using the passed
|
|
|
|
// function to get at the torrent data.
|
2020-05-13 12:32:20 +08:00
|
|
|
func (info *Info) GeneratePieces(open func(fi FileInfo) (io.ReadCloser, error)) (err error) {
|
2016-08-26 18:29:05 +08:00
|
|
|
if info.PieceLength == 0 {
|
|
|
|
return errors.New("piece length must be non-zero")
|
|
|
|
}
|
|
|
|
pr, pw := io.Pipe()
|
|
|
|
go func() {
|
|
|
|
err := info.writeFiles(pw, open)
|
|
|
|
pw.CloseWithError(err)
|
|
|
|
}()
|
|
|
|
defer pr.Close()
|
2020-05-13 12:32:20 +08:00
|
|
|
info.Pieces, err = GeneratePieces(pr, info.PieceLength, nil)
|
|
|
|
return
|
2016-08-26 18:29:05 +08:00
|
|
|
}
|
|
|
|
|
2023-05-04 16:38:04 +08:00
|
|
|
func (info *Info) GeneratePiecesFromMemory(byteData []byte) (err error) {
|
|
|
|
if info.PieceLength == 0 {
|
|
|
|
return errors.New("piece length must be non-zero")
|
|
|
|
}
|
|
|
|
info.Pieces, err = GeneratePiecesFromMemory(byteData, info.PieceLength,int(info.TotalLength()),nil)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2016-08-26 18:29:05 +08:00
|
|
|
func (info *Info) TotalLength() (ret int64) {
|
|
|
|
if info.IsDir() {
|
|
|
|
for _, fi := range info.Files {
|
|
|
|
ret += fi.Length
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
ret = info.Length
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
func (info *Info) NumPieces() int {
|
|
|
|
return len(info.Pieces) / 20
|
|
|
|
}
|
|
|
|
|
|
|
|
func (info *Info) IsDir() bool {
|
|
|
|
return len(info.Files) != 0
|
|
|
|
}
|
|
|
|
|
|
|
|
// The files field, converted up from the old single-file in the parent info
|
|
|
|
// dict if necessary. This is a helper to avoid having to conditionally handle
|
|
|
|
// single and multi-file torrent infos.
|
|
|
|
func (info *Info) UpvertedFiles() []FileInfo {
|
|
|
|
if len(info.Files) == 0 {
|
|
|
|
return []FileInfo{{
|
|
|
|
Length: info.Length,
|
|
|
|
// Callers should determine that Info.Name is the basename, and
|
|
|
|
// thus a regular file.
|
|
|
|
Path: nil,
|
|
|
|
}}
|
|
|
|
}
|
|
|
|
return info.Files
|
|
|
|
}
|
|
|
|
|
|
|
|
// Piece returns a Piece helper for the piece at the given index.
func (info *Info) Piece(index int) Piece {
	return Piece{info, pieceIndex(index)}
}
|
2022-03-17 12:07:10 +08:00
|
|
|
|
|
|
|
func (info Info) BestName() string {
|
|
|
|
if info.NameUtf8 != "" {
|
|
|
|
return info.NameUtf8
|
|
|
|
}
|
|
|
|
return info.Name
|
|
|
|
}
|