2
0
mirror of synced 2025-02-24 06:38:14 +00:00

Merge remote-tracking branch 'libtorgo/master' into HEAD

This commit is contained in:
Matt Joiner 2015-04-27 14:36:35 +10:00
commit ce00bd0791
18 changed files with 2028 additions and 0 deletions

1
README Normal file
View File

@ -0,0 +1 @@
BitTorrent Go library, work in progress..

1
bencode/README Normal file
View File

@ -0,0 +1 @@
Bencode encoding/decoding sub package. Uses similar API design to Go's json package.

Binary file not shown.

165
bencode/api.go Normal file
View File

@ -0,0 +1,165 @@
package bencode
import (
"bufio"
"bytes"
"fmt"
"io"
"reflect"
)
//----------------------------------------------------------------------------
// Errors
//----------------------------------------------------------------------------
// In case if marshaler cannot encode a type, it will return this error. Typical
// example of such type is float32/float64 which has no bencode representation.
type MarshalTypeError struct {
Type reflect.Type
}
func (this *MarshalTypeError) Error() string {
return "bencode: unsupported type: " + this.Type.String()
}
// Unmarshal argument must be a non-nil value of some pointer type.
type UnmarshalInvalidArgError struct {
Type reflect.Type
}
func (e *UnmarshalInvalidArgError) Error() string {
if e.Type == nil {
return "bencode: Unmarshal(nil)"
}
if e.Type.Kind() != reflect.Ptr {
return "bencode: Unmarshal(non-pointer " + e.Type.String() + ")"
}
return "bencode: Unmarshal(nil " + e.Type.String() + ")"
}
// Unmarshaler spotted a value that was not appropriate for a given Go value.
type UnmarshalTypeError struct {
Value string
Type reflect.Type
}
func (e *UnmarshalTypeError) Error() string {
return "bencode: value (" + e.Value + ") is not appropriate for type: " +
e.Type.String()
}
// Unmarshaler tried to write to an unexported (therefore unwritable) field.
type UnmarshalFieldError struct {
Key string
Type reflect.Type
Field reflect.StructField
}
func (e *UnmarshalFieldError) Error() string {
return "bencode: key \"" + e.Key + "\" led to an unexported field \"" +
e.Field.Name + "\" in type: " + e.Type.String()
}
// Malformed bencode input, unmarshaler failed to parse it.
type SyntaxError struct {
Offset int64 // location of the error
What error // error description
}
func (e *SyntaxError) Error() string {
return fmt.Sprintf("bencode: syntax error (offset: %d): %s", e.Offset, e.What)
}
// A non-nil error was returned after calling MarshalBencode on a type which
// implements the Marshaler interface.
type MarshalerError struct {
Type reflect.Type
Err error
}
func (e *MarshalerError) Error() string {
return "bencode: error calling MarshalBencode for type " + e.Type.String() + ": " + e.Err.Error()
}
// A non-nil error was returned after calling UnmarshalBencode on a type which
// implements the Unmarshaler interface.
type UnmarshalerError struct {
Type reflect.Type
Err error
}
func (e *UnmarshalerError) Error() string {
return "bencode: error calling UnmarshalBencode for type " + e.Type.String() + ": " + e.Err.Error()
}
//----------------------------------------------------------------------------
// Interfaces
//----------------------------------------------------------------------------

// Marshaler is implemented by types that can produce their own bencode
// representation. Any type which implements this interface will be
// marshaled using the specified method.
type Marshaler interface {
	MarshalBencode() ([]byte, error)
}

// Unmarshaler is implemented by types that can decode themselves from a raw
// bencode value. Any type which implements this interface will be
// unmarshaled using the specified method.
type Unmarshaler interface {
	UnmarshalBencode([]byte) error
}
//----------------------------------------------------------------------------
// Stateless interface
//----------------------------------------------------------------------------

// Marshal returns the bencode encoding of v, or an error if v contains a
// type with no bencode representation.
func Marshal(v interface{}) ([]byte, error) {
	var out bytes.Buffer
	enc := encoder{Writer: bufio.NewWriter(&out)}
	if err := enc.encode(v); err != nil {
		return nil, err
	}
	return out.Bytes(), nil
}
// Unmarshal parses the bencode value in data and stores the result in the
// value pointed to by v, returning a non-nil error on failure.
func Unmarshal(data []byte, v interface{}) error {
	dec := decoder{Reader: bufio.NewReader(bytes.NewReader(data))}
	return dec.decode(v)
}
//----------------------------------------------------------------------------
// Stateful interface
//----------------------------------------------------------------------------

// Decoder reads and decodes bencode values from an input stream.
type Decoder struct {
	d decoder
}

// NewDecoder returns a new Decoder that reads from r.
func NewDecoder(r io.Reader) *Decoder {
	return &Decoder{decoder{Reader: bufio.NewReader(r)}}
}

// Decode reads the next bencode value from the stream and stores it in the
// value pointed to by v.
func (d *Decoder) Decode(v interface{}) error {
	return d.d.decode(v)
}

// Encoder writes bencode values to an output stream.
type Encoder struct {
	e encoder
}

// NewEncoder returns a new Encoder that writes to w.
func NewEncoder(w io.Writer) *Encoder {
	return &Encoder{encoder{Writer: bufio.NewWriter(w)}}
}

// Encode writes the bencode encoding of v to the stream.
func (e *Encoder) Encode(v interface{}) error {
	// encode already returns its own error (and flushes the buffered
	// writer); the previous err-check-then-return-nil dance was redundant.
	return e.e.encode(v)
}

80
bencode/both_test.go Normal file
View File

@ -0,0 +1,80 @@
package bencode
import "testing"
import "bytes"
import "io/ioutil"
// load_file reads the whole file into memory, failing the test on error.
func load_file(name string, t *testing.T) []byte {
	contents, err := ioutil.ReadFile(name)
	if err != nil {
		t.Fatal(err)
	}
	return contents
}

// test_file_interface round-trips a torrent file through an untyped
// interface{} and checks the re-encoding is byte-identical.
func test_file_interface(t *testing.T, filename string) {
	original := load_file(filename, t)
	var decoded interface{}
	if err := Unmarshal(original, &decoded); err != nil {
		t.Fatal(err)
	}
	reencoded, err := Marshal(decoded)
	if err != nil {
		t.Fatal(err)
	}
	if !bytes.Equal(original, reencoded) {
		t.Fatalf("equality expected\n")
	}
}

func TestBothInterface(t *testing.T) {
	test_file_interface(t, "_testdata/archlinux-2011.08.19-netinstall-i686.iso.torrent")
	test_file_interface(t, "_testdata/continuum.torrent")
}
// torrent_file mirrors the standard .torrent metainfo layout; the `bencode`
// tags map the spec's space- and dash-separated dict keys onto Go fields.
type torrent_file struct {
	Info struct {
		Name        string `bencode:"name"`
		Length      int64  `bencode:"length"`
		MD5Sum      string `bencode:"md5sum,omitempty"`
		PieceLength int64  `bencode:"piece length"`
		Pieces      string `bencode:"pieces"`
		Private     bool   `bencode:"private,omitempty"`
	} `bencode:"info"`
	Announce     string      `bencode:"announce"`
	AnnounceList [][]string  `bencode:"announce-list,omitempty"`
	CreationDate int64       `bencode:"creation date,omitempty"`
	Comment      string      `bencode:"comment,omitempty"`
	CreatedBy    string      `bencode:"created by,omitempty"`
	URLList      interface{} `bencode:"url-list,omitempty"`
}
// test_file round-trips a torrent file through the typed torrent_file
// struct and checks the re-encoding is byte-identical.
func test_file(t *testing.T, filename string) {
	original := load_file(filename, t)
	var f torrent_file
	if err := Unmarshal(original, &f); err != nil {
		t.Fatal(err)
	}
	reencoded, err := Marshal(&f)
	if err != nil {
		t.Fatal(err)
	}
	if !bytes.Equal(original, reencoded) {
		println(string(reencoded))
		t.Fatalf("equality expected")
	}
}

func TestBoth(t *testing.T) {
	test_file(t, "_testdata/archlinux-2011.08.19-netinstall-i686.iso.torrent")
}

579
bencode/decode.go Normal file
View File

@ -0,0 +1,579 @@
package bencode
import (
"bufio"
"bytes"
"errors"
"io"
"reflect"
"runtime"
"strconv"
"strings"
)
// decoder holds the state for a single decoding pass.
type decoder struct {
	*bufio.Reader
	offset int64        // number of bytes consumed so far, for error reporting
	buf    bytes.Buffer // scratch buffer for digits, strings and raw values
	key    string       // most recently parsed dict key
}
// decode unmarshals the next bencode value from the stream into v, which
// must be a non-nil pointer. The internal parse_* routines report failures
// by panicking with error values; the deferred recover converts those
// panics back into a returned error, while genuine runtime errors are
// re-panicked.
func (d *decoder) decode(v interface{}) (err error) {
	defer func() {
		if e := recover(); e != nil {
			if _, ok := e.(runtime.Error); ok {
				panic(e)
			}
			err = e.(error)
		}
	}()
	pv := reflect.ValueOf(v)
	if pv.Kind() != reflect.Ptr || pv.IsNil() {
		return &UnmarshalInvalidArgError{reflect.TypeOf(v)}
	}
	d.parse_value(pv.Elem())
	return nil
}
// check_for_unexpected_eof converts a bare io.EOF into a SyntaxError panic;
// any other error is left for the caller to handle.
func check_for_unexpected_eof(err error, offset int64) {
	if err != io.EOF {
		return
	}
	panic(&SyntaxError{
		Offset: offset,
		What:   io.ErrUnexpectedEOF,
	})
}

// read_byte reads one byte, advancing the offset; panics on any error.
func (d *decoder) read_byte() byte {
	c, err := d.ReadByte()
	if err != nil {
		check_for_unexpected_eof(err, d.offset)
		panic(err)
	}
	d.offset++
	return c
}
// read_until consumes bytes into d.buf until sep is encountered; sep itself
// is consumed but not stored in the buffer.
func (d *decoder) read_until(sep byte) {
	for c := d.read_byte(); c != sep; c = d.read_byte() {
		d.buf.WriteByte(c)
	}
}

// check_for_int_parse_error converts a strconv parse error into a
// SyntaxError panic at the given offset.
func check_for_int_parse_error(err error, offset int64) {
	if err == nil {
		return
	}
	panic(&SyntaxError{
		Offset: offset,
		What:   err,
	})
}
// parse_int decodes an integer value ("i<digits>e"); called when the
// leading 'i' was already consumed. The destination may be any signed or
// unsigned integer kind, or bool (bencode has no bool type; any value
// other than "0" is treated as true).
func (d *decoder) parse_int(v reflect.Value) {
	// offset of the 'i' marker, for error reporting
	start := d.offset - 1
	d.read_until('e')
	if d.buf.Len() == 0 {
		panic(&SyntaxError{
			Offset: start,
			What:   errors.New("empty integer value"),
		})
	}
	switch v.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		n, err := strconv.ParseInt(d.buf.String(), 10, 64)
		check_for_int_parse_error(err, start)
		if v.OverflowInt(n) {
			panic(&UnmarshalTypeError{
				Value: "integer " + d.buf.String(),
				Type:  v.Type(),
			})
		}
		v.SetInt(n)
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		n, err := strconv.ParseUint(d.buf.String(), 10, 64)
		check_for_int_parse_error(err, start)
		if v.OverflowUint(n) {
			panic(&UnmarshalTypeError{
				Value: "integer " + d.buf.String(),
				Type:  v.Type(),
			})
		}
		v.SetUint(n)
	case reflect.Bool:
		v.SetBool(d.buf.String() != "0")
	default:
		panic(&UnmarshalTypeError{
			Value: "integer " + d.buf.String(),
			Type:  v.Type(),
		})
	}
	// scratch buffer is shared; clear it for the next value
	d.buf.Reset()
}
// parse_string decodes a string value ("<length>:<bytes>"); the first digit
// of the length has already been consumed and placed into d.buf by the
// caller. The destination must be a string or a byte slice.
func (d *decoder) parse_string(v reflect.Value) {
	start := d.offset - 1
	// read the string length first
	d.read_until(':')
	length, err := strconv.ParseInt(d.buf.String(), 10, 64)
	check_for_int_parse_error(err, start)
	d.buf.Reset()
	n, err := io.CopyN(&d.buf, d, length)
	d.offset += n
	if err != nil {
		check_for_unexpected_eof(err, d.offset)
		panic(&SyntaxError{
			Offset: d.offset,
			What:   errors.New("unexpected I/O error: " + err.Error()),
		})
	}
	switch v.Kind() {
	case reflect.String:
		v.SetString(d.buf.String())
	case reflect.Slice:
		if v.Type().Elem().Kind() != reflect.Uint8 {
			panic(&UnmarshalTypeError{
				Value: "string",
				Type:  v.Type(),
			})
		}
		// copy out of the scratch buffer, which is reused for the next value
		sl := make([]byte, len(d.buf.Bytes()))
		copy(sl, d.buf.Bytes())
		v.Set(reflect.ValueOf(sl))
	default:
		panic(&UnmarshalTypeError{
			Value: "string",
			Type:  v.Type(),
		})
	}
	d.buf.Reset()
}
// parse_dict decodes a dict body into a map (which must have string keys)
// or a struct; the leading 'd' has already been consumed. For structs, each
// key is matched against `bencode` tags, then exact field names, then a
// case-insensitive name match; values for unmatched keys are parsed and
// discarded.
func (d *decoder) parse_dict(v reflect.Value) {
	switch v.Kind() {
	case reflect.Map:
		t := v.Type()
		if t.Key().Kind() != reflect.String {
			panic(&UnmarshalTypeError{
				Value: "object",
				Type:  t,
			})
		}
		if v.IsNil() {
			v.Set(reflect.MakeMap(t))
		}
	case reflect.Struct:
	default:
		panic(&UnmarshalTypeError{
			Value: "object",
			Type:  v.Type(),
		})
	}
	// reusable scratch value for map entries
	var map_elem reflect.Value
	// so, at this point 'd' byte was consumed, let's just read key/value
	// pairs one by one
	for {
		var valuev reflect.Value
		keyv := reflect.ValueOf(&d.key).Elem()
		if !d.parse_value(keyv) {
			// 'e' terminator reached: dict is complete
			return
		}
		// get valuev as a map value or as a struct field
		switch v.Kind() {
		case reflect.Map:
			elem_type := v.Type().Elem()
			if !map_elem.IsValid() {
				map_elem = reflect.New(elem_type).Elem()
			} else {
				map_elem.Set(reflect.Zero(elem_type))
			}
			valuev = map_elem
		case reflect.Struct:
			var f reflect.StructField
			var ok bool
			t := v.Type()
			for i, n := 0, t.NumField(); i < n; i++ {
				f = t.Field(i)
				tag := f.Tag.Get("bencode")
				if tag == "-" {
					continue
				}
				if f.Anonymous {
					continue
				}
				tag_name, _ := parse_tag(tag)
				if tag_name == d.key {
					ok = true
					break
				}
				if f.Name == d.key {
					ok = true
					break
				}
				if strings.EqualFold(f.Name, d.key) {
					ok = true
					break
				}
			}
			if ok {
				if f.PkgPath != "" {
					// matched an unexported field: cannot be set
					panic(&UnmarshalFieldError{
						Key:   d.key,
						Type:  v.Type(),
						Field: f,
					})
				} else {
					valuev = v.FieldByIndex(f.Index)
				}
			} else {
				// no matching field: parse and discard the value
				_, ok := d.parse_value_interface()
				if !ok {
					panic(&SyntaxError{
						Offset: d.offset,
						What:   errors.New("unexpected end of dict, no matching value for a given key"),
					})
				}
				continue
			}
		}
		// now we need to actually parse it
		if !d.parse_value(valuev) {
			panic(&SyntaxError{
				Offset: d.offset,
				What:   errors.New("unexpected end of dict, no matching value for a given key"),
			})
		}
		if v.Kind() == reflect.Map {
			v.SetMapIndex(keyv, valuev)
		}
	}
}
// parse_list decodes a list body into an array or slice; the leading 'l'
// has already been consumed. Elements beyond an array's capacity are parsed
// and discarded; an array's unused tail is zeroed; slices are truncated to
// the number of decoded elements, and an empty list always yields a
// non-nil empty slice.
func (d *decoder) parse_list(v reflect.Value) {
	switch v.Kind() {
	case reflect.Array, reflect.Slice:
	default:
		panic(&UnmarshalTypeError{
			Value: "array",
			Type:  v.Type(),
		})
	}
	i := 0
	for {
		// grow the slice one element at a time as values arrive
		if v.Kind() == reflect.Slice && i >= v.Len() {
			v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
		}
		ok := false
		if i < v.Len() {
			ok = d.parse_value(v.Index(i))
		} else {
			// array is full: parse into a throwaway interface value
			_, ok = d.parse_value_interface()
		}
		if !ok {
			break
		}
		i++
	}
	if i < v.Len() {
		if v.Kind() == reflect.Array {
			// zero out the remainder of the array
			z := reflect.Zero(v.Type().Elem())
			for n := v.Len(); i < n; i++ {
				v.Index(i).Set(z)
			}
		} else {
			v.SetLen(i)
		}
	}
	if i == 0 && v.Kind() == reflect.Slice {
		// "le" decodes to an empty, non-nil slice
		v.Set(reflect.MakeSlice(v.Type(), 0, 0))
	}
}
// read_one_value copies the next complete bencode value, verbatim, into
// d.buf — used to hand raw bytes to Unmarshaler implementations. Returns
// false (consuming nothing) when the next byte is an 'e' terminator.
func (d *decoder) read_one_value() bool {
	b, err := d.ReadByte()
	if err != nil {
		panic(err)
	}
	if b == 'e' {
		// end-of-container marker belongs to the caller
		d.UnreadByte()
		return false
	} else {
		d.offset++
		d.buf.WriteByte(b)
	}
	switch b {
	case 'd', 'l':
		// recursively copy nested values until there is nothing to read
		for d.read_one_value() {
		}
		// consume 'e' as well
		b = d.read_byte()
		d.buf.WriteByte(b)
	case 'i':
		// read_until consumes the 'e' without storing it; re-append it
		d.read_until('e')
		d.buf.WriteString("e")
	default:
		if b >= '0' && b <= '9' {
			// string; the length digits start at 'start' within d.buf
			start := d.buf.Len() - 1
			d.read_until(':')
			length, err := strconv.ParseInt(d.buf.String()[start:], 10, 64)
			check_for_int_parse_error(err, d.offset-1)
			d.buf.WriteString(":")
			n, err := io.CopyN(&d.buf, d, length)
			d.offset += n
			if err != nil {
				check_for_unexpected_eof(err, d.offset)
				panic(&SyntaxError{
					Offset: d.offset,
					What:   errors.New("unexpected I/O error: " + err.Error()),
				})
			}
			break
		}
		// unknown value
		panic(&SyntaxError{
			Offset: d.offset - 1,
			What:   errors.New("unknown value type (invalid bencode?)"),
		})
	}
	return true
}
// parse_unmarshaler gives v a chance to decode itself via the Unmarshaler
// interface, also checking the addressable *T when T itself does not
// implement it. Returns true if the value was consumed this way.
func (d *decoder) parse_unmarshaler(v reflect.Value) bool {
	m, ok := v.Interface().(Unmarshaler)
	if !ok {
		// T doesn't work, try *T
		if v.Kind() != reflect.Ptr && v.CanAddr() {
			m, ok = v.Addr().Interface().(Unmarshaler)
			if ok {
				v = v.Addr()
			}
		}
	}
	if ok && (v.Kind() != reflect.Ptr || !v.IsNil()) {
		if d.read_one_value() {
			// hand the raw value bytes to the custom unmarshaler
			err := m.UnmarshalBencode(d.buf.Bytes())
			d.buf.Reset()
			if err != nil {
				panic(&UnmarshalerError{v.Type(), err})
			}
			return true
		}
		d.buf.Reset()
	}
	return false
}
// parse_value decodes the next value from the stream into v. Returns true
// if there was a value and it's now stored in 'v'; otherwise there was an
// end symbol ("e") and no value was stored.
func (d *decoder) parse_value(v reflect.Value) bool {
	// we support one level of indirection at the moment
	if v.Kind() == reflect.Ptr {
		// if the pointer is nil, allocate a new element of the type it
		// points to
		if v.IsNil() {
			v.Set(reflect.New(v.Type().Elem()))
		}
		v = v.Elem()
	}
	if d.parse_unmarshaler(v) {
		return true
	}
	// common case: interface{}
	if v.Kind() == reflect.Interface && v.NumMethod() == 0 {
		iface, _ := d.parse_value_interface()
		v.Set(reflect.ValueOf(iface))
		return true
	}
	b, err := d.ReadByte()
	if err != nil {
		panic(err)
	}
	d.offset++
	switch b {
	case 'e':
		return false
	case 'd':
		d.parse_dict(v)
	case 'l':
		d.parse_list(v)
	case 'i':
		d.parse_int(v)
	default:
		if b >= '0' && b <= '9' {
			// string
			// append first digit of the length to the buffer
			d.buf.WriteByte(b)
			d.parse_string(v)
			break
		}
		// unknown value
		panic(&SyntaxError{
			Offset: d.offset - 1,
			What:   errors.New("unknown value type (invalid bencode?)"),
		})
	}
	return true
}
// parse_value_interface decodes the next value into an automatically chosen
// Go type: int64, string, map[string]interface{} or []interface{}. The
// second result is false when the next token is an 'e' terminator.
//
// The trailing panic("unreachable") was removed: every switch branch
// terminates (return or panic), so it was dead code flagged by `go vet`.
func (d *decoder) parse_value_interface() (interface{}, bool) {
	b, err := d.ReadByte()
	if err != nil {
		panic(err)
	}
	d.offset++
	switch b {
	case 'e':
		return nil, false
	case 'd':
		return d.parse_dict_interface(), true
	case 'l':
		return d.parse_list_interface(), true
	case 'i':
		return d.parse_int_interface(), true
	default:
		if b >= '0' && b <= '9' {
			// string; seed the buffer with the first length digit
			d.buf.WriteByte(b)
			return d.parse_string_interface(), true
		}
		// unknown value
		panic(&SyntaxError{
			Offset: d.offset - 1,
			What:   errors.New("unknown value type (invalid bencode?)"),
		})
	}
}
// parse_int_interface decodes "i<digits>e" (the 'i' is already consumed)
// and returns the value as an int64.
func (d *decoder) parse_int_interface() interface{} {
	start := d.offset - 1
	d.read_until('e')
	if d.buf.Len() == 0 {
		panic(&SyntaxError{
			Offset: start,
			What:   errors.New("empty integer value"),
		})
	}
	digits := d.buf.String()
	n, err := strconv.ParseInt(digits, 10, 64)
	check_for_int_parse_error(err, start)
	d.buf.Reset()
	return n
}
// parse_string_interface decodes "<length>:<bytes>" (the first length digit
// is already in d.buf) and returns the contents as a string.
func (d *decoder) parse_string_interface() interface{} {
	start := d.offset - 1
	// read the string length first
	d.read_until(':')
	length, err := strconv.ParseInt(d.buf.String(), 10, 64)
	check_for_int_parse_error(err, start)
	d.buf.Reset()
	n, err := io.CopyN(&d.buf, d, length)
	d.offset += n
	if err != nil {
		check_for_unexpected_eof(err, d.offset)
		panic(&SyntaxError{
			Offset: d.offset,
			What:   errors.New("unexpected I/O error: " + err.Error()),
		})
	}
	s := d.buf.String()
	d.buf.Reset()
	return s
}
// parse_dict_interface decodes a dict body (leading 'd' already consumed)
// into a map[string]interface{}; keys must be bencode strings.
func (d *decoder) parse_dict_interface() interface{} {
	m := make(map[string]interface{})
	for {
		keyi, ok := d.parse_value_interface()
		if !ok {
			return m
		}
		key, isString := keyi.(string)
		if !isString {
			panic(&SyntaxError{
				Offset: d.offset,
				What:   errors.New("non-string key in a dict"),
			})
		}
		value, ok := d.parse_value_interface()
		if !ok {
			panic(&SyntaxError{
				Offset: d.offset,
				What:   errors.New("unexpected end of dict, no matching value for a given key"),
			})
		}
		m[key] = value
	}
}
// parse_list_interface decodes a list body (leading 'l' already consumed)
// into a []interface{}; an empty list yields a non-nil empty slice.
func (d *decoder) parse_list_interface() interface{} {
	list := make([]interface{}, 0)
	for {
		item, ok := d.parse_value_interface()
		if !ok {
			return list
		}
		list = append(list, item)
	}
}

77
bencode/decode_test.go Normal file
View File

@ -0,0 +1,77 @@
package bencode
import "testing"
import "reflect"
// random_decode_test pairs a bencode input with the value it should decode
// to when unmarshaled into a bare interface{}.
type random_decode_test struct {
	data     string
	expected interface{}
}

var random_decode_tests = []random_decode_test{
	{"i57e", int64(57)},
	{"i-9223372036854775808e", int64(-9223372036854775808)}, // math.MinInt64 boundary
	{"5:hello", "hello"},
	{"29:unicode test проверка", "unicode test проверка"}, // lengths count bytes, not runes
	{"d1:ai5e1:b5:helloe", map[string]interface{}{"a": int64(5), "b": "hello"}},
	{"li5ei10ei15ei20e7:bencodee",
		[]interface{}{int64(5), int64(10), int64(15), int64(20), "bencode"}},
	{"ldedee", []interface{}{map[string]interface{}{}, map[string]interface{}{}}},
	{"le", []interface{}{}},
}
// TestRandomDecode checks the table of untyped decoding cases.
func TestRandomDecode(t *testing.T) {
	for _, test := range random_decode_tests {
		var got interface{}
		if err := Unmarshal([]byte(test.data), &got); err != nil {
			t.Error(err)
			continue
		}
		if !reflect.DeepEqual(test.expected, got) {
			t.Errorf("got: %v (%T), expected: %v (%T)\n",
				got, got, test.expected, test.expected)
		}
	}
}
// check_error records err as a test failure when it is non-nil.
func check_error(t *testing.T, err error) {
	if err == nil {
		return
	}
	t.Error(err)
}

// assert_equal records a test failure when x and y are not deeply equal.
func assert_equal(t *testing.T, x, y interface{}) {
	if reflect.DeepEqual(x, y) {
		return
	}
	t.Errorf("got: %v (%T), expected: %v (%T)\n", x, x, y, y)
}
// unmarshaler_int exercises the Unmarshaler interface by delegating back to
// Unmarshal for its single int field. Receivers renamed from `this` to `u`
// per Go convention.
type unmarshaler_int struct {
	x int
}

func (u *unmarshaler_int) UnmarshalBencode(data []byte) error {
	return Unmarshal(data, &u.x)
}

// unmarshaler_string records the raw bencode bytes it was handed, letting
// tests observe exactly what the decoder passes to custom unmarshalers.
type unmarshaler_string struct {
	x string
}

func (u *unmarshaler_string) UnmarshalBencode(data []byte) error {
	u.x = string(data)
	return nil
}
// TestUnmarshalerBencode verifies that custom unmarshalers receive the raw
// bencode value bytes, both at top level and inside a list.
func TestUnmarshalerBencode(t *testing.T) {
	var i unmarshaler_int
	check_error(t, Unmarshal([]byte("i71e"), &i))
	assert_equal(t, i.x, 71)

	var ss []unmarshaler_string
	check_error(t, Unmarshal([]byte("l5:hello5:fruit3:waye"), &ss))
	assert_equal(t, ss[0].x, "5:hello")
	assert_equal(t, ss[1].x, "5:fruit")
	assert_equal(t, ss[2].x, "3:way")
}

248
bencode/encode.go Normal file
View File

@ -0,0 +1,248 @@
package bencode
import "bufio"
import "reflect"
import "runtime"
import "strconv"
import "sync"
import "sort"
func is_empty_value(v reflect.Value) bool {
switch v.Kind() {
case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
return v.Len() == 0
case reflect.Bool:
return !v.Bool()
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return v.Int() == 0
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
return v.Uint() == 0
case reflect.Float32, reflect.Float64:
return v.Float() == 0
case reflect.Interface, reflect.Ptr:
return v.IsNil()
}
return false
}
// encoder holds the state for a single encoding pass.
type encoder struct {
	*bufio.Writer
	scratch [64]byte // reusable buffer for strconv.Append* conversions
}

// encode writes the bencode form of v. Internal helpers report failures by
// panicking with error values; the deferred recover converts those panics
// back into a returned error, while genuine runtime errors are re-panicked.
// The buffered writer is flushed before returning.
func (e *encoder) encode(v interface{}) (err error) {
	defer func() {
		if e := recover(); e != nil {
			if _, ok := e.(runtime.Error); ok {
				panic(e)
			}
			err = e.(error)
		}
	}()
	e.reflect_value(reflect.ValueOf(v))
	return e.Flush()
}
type string_values []reflect.Value
func (sv string_values) Len() int { return len(sv) }
func (sv string_values) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] }
func (sv string_values) Less(i, j int) bool { return sv.get(i) < sv.get(j) }
func (sv string_values) get(i int) string { return sv[i].String() }
// write sends raw bytes to the underlying writer, panicking on error so
// that encode's recover turns the failure into a returned error.
func (e *encoder) write(s []byte) {
	if _, err := e.Write(s); err != nil {
		panic(err)
	}
}

// write_string is the string counterpart of write.
func (e *encoder) write_string(s string) {
	if _, err := e.WriteString(s); err != nil {
		panic(err)
	}
}

// reflect_string emits a string as "<length>:<bytes>".
func (e *encoder) reflect_string(s string) {
	e.write(strconv.AppendInt(e.scratch[:0], int64(len(s)), 10))
	e.write_string(":")
	e.write_string(s)
}

// reflect_byte_slice emits a byte slice as "<length>:<bytes>".
func (e *encoder) reflect_byte_slice(s []byte) {
	e.write(strconv.AppendInt(e.scratch[:0], int64(len(s)), 10))
	e.write_string(":")
	e.write(s)
}
// reflect_marshaler gives v a chance to encode itself via the Marshaler
// interface, also checking the addressable *T when T itself does not
// implement it. Returns true if the value implements Marshaler and
// marshaling was done successfully.
func (e *encoder) reflect_marshaler(v reflect.Value) bool {
	m, ok := v.Interface().(Marshaler)
	if !ok {
		// T doesn't work, try *T
		if v.Kind() != reflect.Ptr && v.CanAddr() {
			m, ok = v.Addr().Interface().(Marshaler)
			if ok {
				v = v.Addr()
			}
		}
	}
	if ok && (v.Kind() != reflect.Ptr || !v.IsNil()) {
		data, err := m.MarshalBencode()
		if err != nil {
			panic(&MarshalerError{v.Type(), err})
		}
		// the marshaler's output is written verbatim; it is trusted to be
		// valid bencode
		e.write(data)
		return true
	}
	return false
}
// reflect_value encodes a single Go value: bools as i0e/i1e, integers as
// i<n>e, strings and []byte as length-prefixed strings, structs and maps as
// dicts (struct fields ordered by tag, map keys sorted), other slices and
// arrays as lists. Interfaces and pointers are dereferenced (nil encodes
// nothing). Unsupported kinds panic with MarshalTypeError.
func (e *encoder) reflect_value(v reflect.Value) {
	if !v.IsValid() {
		// e.g. Marshal(nil): nothing to write
		return
	}
	if e.reflect_marshaler(v) {
		return
	}
	switch v.Kind() {
	case reflect.Bool:
		if v.Bool() {
			e.write_string("i1e")
		} else {
			e.write_string("i0e")
		}
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		b := strconv.AppendInt(e.scratch[:0], v.Int(), 10)
		e.write_string("i")
		e.write(b)
		e.write_string("e")
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		b := strconv.AppendUint(e.scratch[:0], v.Uint(), 10)
		e.write_string("i")
		e.write(b)
		e.write_string("e")
	case reflect.String:
		e.reflect_string(v.String())
	case reflect.Struct:
		e.write_string("d")
		for _, ef := range encode_fields(v.Type()) {
			field_value := v.Field(ef.i)
			// honor the omitempty tag option
			if ef.omit_empty && is_empty_value(field_value) {
				continue
			}
			e.reflect_string(ef.tag)
			e.reflect_value(field_value)
		}
		e.write_string("e")
	case reflect.Map:
		if v.Type().Key().Kind() != reflect.String {
			panic(&MarshalTypeError{v.Type()})
		}
		if v.IsNil() {
			e.write_string("de")
			break
		}
		e.write_string("d")
		// keys are sorted for canonical output
		sv := string_values(v.MapKeys())
		sort.Sort(sv)
		for _, key := range sv {
			e.reflect_string(key.String())
			e.reflect_value(v.MapIndex(key))
		}
		e.write_string("e")
	case reflect.Slice:
		if v.IsNil() {
			e.write_string("le")
			break
		}
		// []byte is special-cased as a bencode string
		if v.Type().Elem().Kind() == reflect.Uint8 {
			s := v.Bytes()
			e.reflect_byte_slice(s)
			break
		}
		fallthrough
	case reflect.Array:
		e.write_string("l")
		for i, n := 0, v.Len(); i < n; i++ {
			e.reflect_value(v.Index(i))
		}
		e.write_string("e")
	case reflect.Interface, reflect.Ptr:
		if v.IsNil() {
			break
		}
		e.reflect_value(v.Elem())
	default:
		panic(&MarshalTypeError{v.Type()})
	}
}
// encode_field describes one struct field to be emitted: its index within
// the struct, the dict key to write, and whether the "omitempty" tag option
// was present.
type encode_field struct {
	i          int
	tag        string
	omit_empty bool
}

// encode_fields_sort_type orders fields by their dict key, as required for
// canonical bencode dict output.
type encode_fields_sort_type []encode_field

func (ef encode_fields_sort_type) Len() int           { return len(ef) }
func (ef encode_fields_sort_type) Swap(i, j int)      { ef[i], ef[j] = ef[j], ef[i] }
func (ef encode_fields_sort_type) Less(i, j int) bool { return ef[i].tag < ef[j].tag }

var (
	// type_cache_lock guards encode_fields_cache.
	type_cache_lock     sync.RWMutex
	encode_fields_cache = make(map[reflect.Type][]encode_field)
)
// encode_fields returns the encodable fields of struct type t, sorted by
// dict key, skipping unexported, anonymous and `bencode:"-"` fields.
// Results are memoized per type behind a read-mostly lock.
func encode_fields(t reflect.Type) []encode_field {
	// fast path: already cached
	type_cache_lock.RLock()
	fs, ok := encode_fields_cache[t]
	type_cache_lock.RUnlock()
	if ok {
		return fs
	}
	type_cache_lock.Lock()
	defer type_cache_lock.Unlock()
	// re-check: another goroutine may have filled the cache while we were
	// waiting for the write lock
	fs, ok = encode_fields_cache[t]
	if ok {
		return fs
	}
	for i, n := 0, t.NumField(); i < n; i++ {
		f := t.Field(i)
		if f.PkgPath != "" {
			// unexported field
			continue
		}
		if f.Anonymous {
			continue
		}
		var ef encode_field
		ef.i = i
		ef.tag = f.Name // default key is the field name
		tv := f.Tag.Get("bencode")
		if tv != "" {
			if tv == "-" {
				continue
			}
			name, opts := parse_tag(tv)
			ef.tag = name
			ef.omit_empty = opts.contains("omitempty")
		}
		fs = append(fs, ef)
	}
	fss := encode_fields_sort_type(fs)
	sort.Sort(fss)
	encode_fields_cache[t] = fs
	return fs
}

68
bencode/encode_test.go Normal file
View File

@ -0,0 +1,68 @@
package bencode
import "testing"
import "bytes"
import "fmt"
// random_encode_test pairs an arbitrary Go value with its expected bencode
// encoding.
type random_encode_test struct {
	value    interface{}
	expected string
}

// random_struct exercises struct-tag handling: a renamed field, a skipped
// field and an untagged field.
type random_struct struct {
	ABC         int    `bencode:"abc"`
	SkipThisOne string `bencode:"-"`
	CDE         string
}

// dummy implements Marshaler; it encodes each field incremented by one so
// tests can tell the custom marshaler actually ran.
type dummy struct {
	a, b, c int
}

func (d *dummy) MarshalBencode() ([]byte, error) {
	var out bytes.Buffer
	if _, err := fmt.Fprintf(&out, "i%dei%dei%de", d.a+1, d.b+1, d.c+1); err != nil {
		return nil, err
	}
	return out.Bytes(), nil
}
// random_encode_tests is a table of (value, expected bencode) pairs
// covering scalars, structs, maps, slices, arrays and custom marshalers.
var random_encode_tests = []random_encode_test{
	{int(10), "i10e"},
	{uint(10), "i10e"},
	{"hello, world", "12:hello, world"},
	{true, "i1e"},
	{false, "i0e"},
	{int8(-8), "i-8e"},
	{int16(-16), "i-16e"},
	{int32(32), "i32e"},
	{int64(-64), "i-64e"},
	{uint8(8), "i8e"},
	{uint16(16), "i16e"},
	{uint32(32), "i32e"},
	{uint64(64), "i64e"},
	{random_struct{123, "nono", "hello"}, "d3:CDE5:hello3:abci123ee"}, // "-" tagged field is skipped
	{map[string]string{"a": "b", "c": "d"}, "d1:a1:b1:c1:de"},
	{[]byte{1, 2, 3, 4}, "4:\x01\x02\x03\x04"}, // []byte encodes as a string
	{[4]byte{1, 2, 3, 4}, "li1ei2ei3ei4ee"},    // but a byte array encodes as a list
	{nil, ""},
	{[]byte{}, "0:"},
	{"", "0:"},
	{[]int{}, "le"},
	{map[string]int{}, "de"},
	{&dummy{1, 2, 3}, "i2ei3ei4e"}, // custom marshaler output
}
// TestRandomEncode checks the table of encoding cases.
func TestRandomEncode(t *testing.T) {
	for _, test := range random_encode_tests {
		data, err := Marshal(test.value)
		if err != nil {
			t.Fatal(err)
		}
		if string(data) != test.expected {
			t.Errorf("got: %s, expected: %s\n",
				string(data), string(test.expected))
		}
	}
}

34
bencode/tags.go Normal file
View File

@ -0,0 +1,34 @@
package bencode
import (
"strings"
)
// tag_options is the comma-separated option list that follows the name in a
// `bencode:"name,opt1,opt2"` struct tag.
type tag_options string

// parse_tag splits a struct tag value into its name and trailing options.
func parse_tag(tag string) (string, tag_options) {
	if idx := strings.Index(tag, ","); idx != -1 {
		return tag[:idx], tag_options(tag[idx+1:])
	}
	return tag, tag_options("")
}

// contains reports whether option_name appears in the comma-separated
// option list. Receiver renamed from `this` to `opts` per Go convention.
func (opts tag_options) contains(option_name string) bool {
	if len(opts) == 0 {
		return false
	}
	s := string(opts)
	for s != "" {
		var next string
		i := strings.Index(s, ",")
		if i != -1 {
			s, next = s[:i], s[i+1:]
		}
		if s == option_name {
			return true
		}
		s = next
	}
	return false
}

1
metainfo/README Normal file
View File

@ -0,0 +1 @@
A library for manipulating ".torrent" files.

Binary file not shown.

574
metainfo/builder.go Normal file
View File

@ -0,0 +1,574 @@
package metainfo
import (
"crypto/sha1"
"errors"
"github.com/anacrolix/libtorgo/bencode"
"hash"
"io"
"os"
"path/filepath"
"sort"
"time"
)
//----------------------------------------------------------------------------
// Build
//----------------------------------------------------------------------------

// The Builder type is responsible for .torrent files construction. Just
// instantiate it, call necessary methods and then call the .Build method.
// While waiting for completion you can use the 'status' channel to get
// status reports.
type Builder struct {
	batch_state
	// set of absolute file paths queued for inclusion; a map is used so
	// that repeated AddFile calls with the same path deduplicate
	filesmap map[string]bool
}

// AddFile adds a file to the builder queue. You may add one or more files.
// The path is made absolute; panics if that conversion fails.
func (b *Builder) AddFile(filename string) {
	if b.filesmap == nil {
		b.filesmap = make(map[string]bool)
	}
	filename, err := filepath.Abs(filename)
	if err != nil {
		panic(err)
	}
	b.filesmap[filename] = true
}
// SetName defines a name of the future torrent file. For single file
// torrents it's the recommended name of the contained file. For multiple
// files torrents it's the recommended name of the directory in which all
// of them will be stored. Calling this function is not required; if no name
// was specified, the builder will try to assign one automatically: the name
// of the file if there is only one file in the queue, or the name of the
// rightmost common directory of all the queued files, falling back to
// "unknown".
func (b *Builder) SetName(name string) {
	b.name = name
}

// SetPieceLength sets the length of a piece in the torrent file in bytes.
// The default is 256kb.
func (b *Builder) SetPieceLength(length int64) {
	b.piece_length = length
}

// SetPrivate sets the "private" flag. The default is false.
func (b *Builder) SetPrivate(v bool) {
	b.private = v
}

// AddAnnounceGroup adds an announce URL group. TODO: better explanation.
func (b *Builder) AddAnnounceGroup(group []string) {
	b.announce_list = append(b.announce_list, group)
}

// SetCreationDate sets the creation date. The default is time.Now() at the
// moment the .Build method is called.
func (b *Builder) SetCreationDate(date time.Time) {
	b.creation_date = date
}

// SetComment sets the comment. The default is no comment.
func (b *Builder) SetComment(comment string) {
	b.comment = comment
}

// SetCreatedBy sets the "created by" parameter. The default is "libtorgo".
func (b *Builder) SetCreatedBy(createdby string) {
	b.created_by = createdby
}

// SetEncoding sets the "encoding" parameter. The default is "UTF-8".
func (b *Builder) SetEncoding(encoding string) {
	b.encoding = encoding
}

// AddWebSeedURL adds a WebSeed URL to the list.
func (b *Builder) AddWebSeedURL(url string) {
	b.urls = append(b.urls, url)
}
// Submit finalizes the Builder state and makes a Batch out of it. After
// calling this method the Builder becomes empty and can be reused to create
// another Batch.
func (b *Builder) Submit() (*Batch, error) {
	err := b.check_parameters()
	if err != nil {
		return nil, err
	}
	b.set_defaults()
	batch := &Batch{
		batch_state: b.batch_state,
	}
	// any of these mode bits means the path is not a regular file
	const non_regular = os.ModeDir | os.ModeSymlink |
		os.ModeDevice | os.ModeNamedPipe | os.ModeSocket
	// convert a map to a slice, calculate sizes and split paths
	batch.total_size = 0
	batch.files = make([]file, 0, 10)
	for f, _ := range b.filesmap {
		var file file
		fi, err := os.Stat(f)
		if err != nil {
			return nil, err
		}
		if fi.Mode()&non_regular != 0 {
			return nil, errors.New(f + " is not a regular file")
		}
		file.abspath = f
		file.splitpath = split_path(f)
		file.size = fi.Size()
		batch.files = append(batch.files, file)
		batch.total_size += file.size
	}
	// find the rightmost common directory
	if len(batch.files) == 1 {
		sp := batch.files[0].splitpath
		batch.default_name = sp[len(sp)-1]
	} else {
		// start from the first file's components and shrink the common
		// prefix against every other file
		common := batch.files[0].splitpath
		for _, f := range batch.files {
			if len(common) > len(f.splitpath) {
				common = common[:len(f.splitpath)]
			}
			for i, n := 0, len(common); i < n; i++ {
				if common[i] != f.splitpath[i] {
					common = common[:i]
					break
				}
			}
			if len(common) == 0 {
				break
			}
		}
		if len(common) == 0 {
			return nil, errors.New("no common rightmost folder was found for a set of queued files")
		}
		// found the common folder, let's strip that part from splitpath
		// and setup the default name
		batch.default_name = common[len(common)-1]
		lcommon := len(common)
		for i := range batch.files {
			f := &batch.files[i]
			f.splitpath = f.splitpath[lcommon:]
		}
		// and finally sort the files
		sort.Sort(file_slice(batch.files))
	}
	// reset the builder state
	b.batch_state = batch_state{}
	b.filesmap = nil
	return batch, nil
}
// set_defaults fills in default values for every parameter the user did not
// set explicitly.
func (b *Builder) set_defaults() {
	if b.piece_length == 0 {
		b.piece_length = 256 * 1024 // 256kb pieces
	}
	if b.creation_date.IsZero() {
		b.creation_date = time.Now()
	}
	if b.created_by == "" {
		b.created_by = "libtorgo"
	}
	if b.encoding == "" {
		b.encoding = "UTF-8"
	}
}

// check_parameters validates builder state before Submit proceeds: at least
// one file and one non-empty announce group are required. Empty strings are
// stripped from announce groups and from the web seed URL list.
func (b *Builder) check_parameters() error {
	// should be at least one file
	if len(b.filesmap) == 0 {
		return errors.New("no files were queued")
	}
	// let's clean up the announce_list
	newal := make([][]string, 0, len(b.announce_list))
	for _, ag := range b.announce_list {
		ag = remove_empty_strings(ag)
		// discard empty announce groups
		if len(ag) == 0 {
			continue
		}
		newal = append(newal, ag)
	}
	b.announce_list = newal
	if len(b.announce_list) == 0 {
		return errors.New("no announce groups were specified")
	}
	// and clean up the urls
	b.urls = remove_empty_strings(b.urls)
	return nil
}
//----------------------------------------------------------------------------
// Batch
//----------------------------------------------------------------------------

// Batch represents a snapshot of a builder state, ready for transforming it
// into a torrent file. Note that Batch contains two accessor methods you
// might be interested in: TotalSize is the total size of all the files
// queued for hashing (useful for status reporting), and DefaultName is an
// automatically determined name of the torrent metainfo, which you might
// want to use for naming the .torrent file itself.
type Batch struct {
	batch_state
	files        []file // files to hash, paths already relative to the common root
	total_size   int64  // sum of all file sizes in bytes
	default_name string // auto-derived torrent name
}

// TotalSize returns the total size of all the files queued for hashing.
// Useful in conjunction with status reports.
func (b *Batch) TotalSize() int64 {
	return b.total_size
}

// DefaultName returns an automatically determined name for the future
// torrent metainfo. You can use it for the .torrent file in case the user
// hasn't provided one specifically.
func (b *Batch) DefaultName() string {
	return b.default_name
}
// Starts a process of building the torrent file. This function does everything
// in a separate goroutine and uses up to 'nworkers' goroutines to perform
// SHA1 hashing. Therefore it will return almost immediately. It returns two
// channels, the first one is for completion awaiting, the second one is for
// getting status reports. A status report is the number of bytes hashed; you
// can get the total amount of bytes by inspecting the Batch.TotalSize method
// return value.
func (b *Batch) Start(w io.Writer, nworkers int) (<-chan error, <-chan int64) {
	if nworkers <= 0 {
		nworkers = 1
	}
	completion := make(chan error)
	status := make(chan int64)
	go func() {
		// prepare workers; each worker announces itself on free_workers
		// whenever it is ready to accept another piece
		workers := make([]*worker, nworkers)
		free_workers := make(chan *worker, nworkers)
		for i := 0; i < nworkers; i++ {
			workers[i] = new_worker(free_workers)
		}
		stop_workers := func() {
			for _, w := range workers {
				w.stop()
			}
			for _, w := range workers {
				w.wait_for_stop()
			}
		}
		// prepare files for reading
		fr := files_reader{files: b.files}
		// +1 accounts for a final partial piece.
		// NOTE(review): when total_size is an exact multiple of
		// piece_length this allocates (and hashes) one extra empty
		// trailing piece — confirm whether that is intended.
		npieces := b.total_size/b.piece_length + 1
		b.pieces = make([]byte, 20*npieces)
		hashed := int64(0)
		// read all the pieces passing them to workers for hashing
		var data []byte
		for i := int64(0); i < npieces; i++ {
			if data == nil {
				data = make([]byte, b.piece_length)
			}
			nr, err := fr.Read(data)
			if err != nil {
				// EOF is not an error if it was the last piece
				if err == io.EOF {
					if i != npieces-1 {
						stop_workers()
						completion <- err
						return
					}
				} else {
					stop_workers()
					completion <- err
					return
				}
			}
			// cut the data slice to the amount of actual data read
			data = data[:nr]
			w := <-free_workers
			// queue swaps buffers: it hands our buffer to the worker and
			// returns the buffer the worker finished with previously
			// (nil before the worker's first job), so buffers are recycled
			data = w.queue(data, b.pieces[20*i:20*i+20])
			// update and try to send the status report (non-blocking: a
			// slow or absent status reader must not stall hashing)
			if data != nil {
				hashed += int64(len(data))
				// restore the recycled buffer to full capacity for reuse
				data = data[:cap(data)]
				select {
				case status <- hashed:
				default:
				}
			}
		}
		stop_workers()
		// at this point the hash was calculated and we're ready to
		// write the torrent file
		err := b.write_torrent(w)
		if err != nil {
			completion <- err
			return
		}
		completion <- nil
	}()
	return completion, status
}
// write_torrent serializes the batch state as a bencoded metainfo
// dictionary to w.
func (b *Batch) write_torrent(w io.Writer) error {
	var mi MetaInfo
	// The first tracker of the first group doubles as the legacy
	// "announce" field; the full announce-list is only emitted when
	// there is more than a single tracker overall.
	mi.Announce = b.announce_list[0][0]
	if len(b.announce_list) != 1 || len(b.announce_list[0]) != 1 {
		mi.AnnounceList = b.announce_list
	}
	mi.CreationDate = b.creation_date.Unix()
	mi.Comment = b.comment
	mi.CreatedBy = b.created_by
	mi.Encoding = b.encoding
	// A single web seed URL is written as a plain string, several as a list.
	if n := len(b.urls); n == 1 {
		mi.URLList = b.urls[0]
	} else if n > 1 {
		mi.URLList = b.urls
	}
	mi.Info.PieceLength = b.piece_length
	mi.Info.Pieces = b.pieces
	mi.Info.Name = b.name
	if mi.Info.Name == "" {
		mi.Info.Name = b.default_name
	}
	if len(b.files) == 1 {
		// single-file layout: just the length
		mi.Info.Length = b.files[0].size
	} else {
		// multi-file layout: one FileInfo per file
		mi.Info.Files = make([]FileInfo, len(b.files))
		for i := range b.files {
			mi.Info.Files[i] = FileInfo{
				Path:   b.files[i].splitpath,
				Length: b.files[i].size,
			}
		}
	}
	mi.Info.Private = b.private
	return bencode.NewEncoder(w).Encode(&mi)
}
//----------------------------------------------------------------------------
// misc stuff
//----------------------------------------------------------------------------

// split_path breaks a path into its components (dirs and the final
// file). Works only on absolute paths.
func split_path(path string) []string {
	parts := make([]string, 0, 5)
	rest := path
	for {
		var name string
		rest, name = filepath.Split(filepath.Clean(rest))
		if name == "" {
			break
		}
		parts = append(parts, name)
	}
	// components were collected leaf-first; flip into root-first order
	for l, r := 0, len(parts)-1; l < r; l, r = l+1, r-1 {
		parts[l], parts[r] = parts[r], parts[l]
	}
	return parts
}
// batch_state is just the common data between the Builder and the Batch.
type batch_state struct {
	name          string     // torrent name ("" means use the default name)
	piece_length  int64      // bytes per piece
	pieces        []byte     // concatenated 20-byte SHA-1 piece hashes
	private       bool       // the "private" flag of the info dictionary
	announce_list [][]string // tracker URL groups
	creation_date time.Time  // written out as unix seconds
	comment       string
	created_by    string
	encoding      string
	urls          []string // web seed URLs ("url-list")
}
// file describes a single file queued for hashing.
type file struct {
	abspath   string   // absolute path of the file on disk
	splitpath []string // path components with the common prefix stripped
	size      int64    // length of the file in bytes
}
// file_slice implements sort.Interface, ordering files by absolute path.
type file_slice []file

func (s file_slice) Len() int           { return len(s) }
func (s file_slice) Less(i, j int) bool { return s[i].abspath < s[j].abspath }
func (s file_slice) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }
// remove_empty_strings filters "" entries out of slice in place,
// reusing its backing array, and returns the shortened slice.
func remove_empty_strings(slice []string) []string {
	kept := slice[:0]
	for _, s := range slice {
		if s != "" {
			kept = append(kept, s)
		}
	}
	return kept
}
//----------------------------------------------------------------------------
// worker
//----------------------------------------------------------------------------

// worker is a handle to a single hashing goroutine. See new_worker for
// the message protocol carried over msgbox.
type worker struct {
	msgbox chan bool // false = hash request, true = stop request / stop ack
	hash   hash.Hash // SHA-1 state, reset and reused for every piece
	// request fields, filled in by queue() before signalling msgbox
	sha1 []byte // destination slice for the 20-byte digest
	data []byte // bytes to hash
}
// queue hands 'data' to the worker for hashing into 'sha1' (a 20-byte
// destination slice) and returns the buffer from the worker's previous
// job (nil before the first one), so the caller can recycle buffers.
// Must only be called when the worker is known to be free, i.e. after
// receiving it from the free-workers channel.
func (w *worker) queue(data, sha1 []byte) []byte {
	d := w.data
	w.data = data
	w.sha1 = sha1
	// false on msgbox means "hash the request fields set above"
	w.msgbox <- false
	return d
}
// stop asks the worker goroutine to exit; true on msgbox is the stop
// signal. Pair with wait_for_stop to await the acknowledgement.
func (w *worker) stop() {
	w.msgbox <- true
}
// wait_for_stop blocks until the worker goroutine echoes the stop
// signal back, i.e. until it has actually terminated its loop.
func (w *worker) wait_for_stop() {
	<-w.msgbox
}
// new_worker spawns a hashing goroutine and returns its handle. The
// worker announces itself on 'out' whenever it is free to accept a job.
// Protocol over msgbox: false asks the goroutine to hash w.data into
// w.sha1; true asks it to stop, which it echoes back so that
// wait_for_stop can observe termination.
func new_worker(out chan<- *worker) *worker {
	w := &worker{
		msgbox: make(chan bool),
		hash:   sha1.New(),
	}
	go func() {
		var sha1 [20]byte
		for {
			// true means stop; echo it back to unblock wait_for_stop
			if <-w.msgbox {
				w.msgbox <- true
				return
			}
			w.hash.Reset()
			w.hash.Write(w.data)
			w.hash.Sum(sha1[:0])
			copy(w.sha1, sha1[:])
			// done with this request; advertise availability again
			out <- w
		}
	}()
	// the freshly created worker is immediately available
	out <- w
	return w
}
//----------------------------------------------------------------------------
// files_reader
//----------------------------------------------------------------------------

// files_reader presents a list of files as one continuous stream of
// bytes, opening each file lazily as it is reached.
type files_reader struct {
	files   []file   // files to read, in order
	cur     int      // index of the file currently being read
	curfile *os.File // open handle for files[cur], nil if not opened yet
	off     int64    // bytes already consumed from the current file
}
// Read implements io.Reader over the concatenation of all queued files.
// Each file is assumed to hold exactly file.size bytes (as recorded when
// it was queued); once every file has been consumed, io.EOF is returned.
func (f *files_reader) Read(data []byte) (int, error) {
	if f.cur >= len(f.files) {
		return 0, io.EOF
	}
	if len(data) == 0 {
		return 0, nil
	}
	read := 0
	for len(data) > 0 {
		file := &f.files[f.cur]
		// lazily open the current file on first access
		if f.curfile == nil {
			var err error
			f.curfile, err = os.Open(file.abspath)
			if err != nil {
				return read, err
			}
		}
		// we need to read up to 'len(data)' bytes from current file
		n := int64(len(data))
		// unless there is not enough data in this file
		if file.size-f.off < n {
			n = file.size - f.off
		}
		// if there is no data in this file, close it and try the next one
		if n == 0 {
			err := f.curfile.Close()
			if err != nil {
				return read, err
			}
			f.curfile = nil
			f.off = 0
			f.cur++
			if f.cur >= len(f.files) {
				return read, io.EOF
			}
			continue
		}
		// read, handle errors
		nr, err := f.curfile.Read(data[:n])
		read += nr
		f.off += int64(nr)
		if err != nil {
			return read, err
		}
		// ok, we've read nr bytes out of len(data), cut the data slice
		data = data[nr:]
	}
	return read, nil
}

154
metainfo/metainfo.go Normal file
View File

@ -0,0 +1,154 @@
package metainfo
import (
"crypto/sha1"
"io"
"os"
"github.com/anacrolix/libtorgo/bencode"
)
// FileInfo is information specific to a single file inside the MetaInfo
// structure.
type FileInfo struct {
	Length int64    `bencode:"length"` // length of the file in bytes
	Path   []string `bencode:"path"`   // path components relative to the torrent root
}
// Load reads a bencoded MetaInfo from r. Returns a non-nil error in
// case of failure.
func Load(r io.Reader) (*MetaInfo, error) {
	mi := new(MetaInfo)
	if err := bencode.NewDecoder(r).Decode(mi); err != nil {
		return nil, err
	}
	return mi, nil
}
// LoadFromFile is a convenience wrapper around Load that reads the
// metainfo from the named file.
func LoadFromFile(filename string) (*MetaInfo, error) {
	f, err := os.Open(filename)
	if err != nil {
		return nil, err
	}
	// best-effort close; the decode result is what matters
	mi, err := Load(f)
	f.Close()
	return mi, err
}
// Info is the torrent metainfo "info" dictionary.
type Info struct {
	PieceLength int64      `bencode:"piece length"` // bytes per piece
	Pieces      []byte     `bencode:"pieces"`       // concatenated 20-byte SHA-1 piece hashes
	Name        string     `bencode:"name"`
	Length      int64      `bencode:"length,omitempty"` // set for single-file torrents only
	Private     bool       `bencode:"private,omitempty"`
	Files       []FileInfo `bencode:"files,omitempty"` // set for multi-file torrents only
}
// TotalLength returns the number of bytes the torrent describes: the
// sum of the file lengths for a multi-file torrent, or the single
// Length field otherwise.
func (me *Info) TotalLength() (ret int64) {
	if !me.IsDir() {
		return me.Length
	}
	for _, fi := range me.Files {
		ret += fi.Length
	}
	return
}
// NumPieces returns how many 20-byte SHA-1 piece hashes Pieces holds.
func (me *Info) NumPieces() int {
	const sha1Size = 20
	return len(me.Pieces) / sha1Size
}
// Piece describes one piece of a torrent: its hash, its byte length and
// its byte offset within the concatenated torrent data.
type Piece interface {
	Hash() []byte
	Length() int64
	Offset() int64
}
// piece is the concrete Piece implementation backed by an *Info and a
// piece index.
type piece struct {
	Info *Info
	i    int // 0-based piece index
}
// Length returns the byte length of this piece. All pieces are
// PieceLength bytes except the final one, which covers whatever
// remains of the total length.
func (me piece) Length() int64 {
	if me.i != me.Info.NumPieces()-1 {
		return me.Info.PieceLength
	}
	return me.Info.TotalLength() - int64(me.i)*me.Info.PieceLength
}
// Offset returns the byte offset of this piece within the torrent data.
func (me piece) Offset() int64 {
	return me.Info.PieceLength * int64(me.i)
}
// Hash returns this piece's 20-byte SHA-1, sliced out of the
// concatenated Pieces blob.
func (me piece) Hash() []byte {
	start := me.i * 20
	return me.Info.Pieces[start : start+20]
}
// Piece returns an accessor for the i'th piece of the torrent.
func (me *Info) Piece(i int) piece {
	return piece{Info: me, i: i}
}
// IsDir reports whether this info describes a multi-file (directory)
// torrent rather than a single file.
func (i *Info) IsDir() bool {
	return len(i.Files) > 0
}
// UpvertedFiles returns the files field, converted up from the old
// single-file form in the parent info dict if necessary. This is a
// helper to avoid having to conditionally handle single and multi-file
// torrent infos.
func (i *Info) UpvertedFiles() []FileInfo {
	if len(i.Files) != 0 {
		return i.Files
	}
	// Single-file layout: Path stays nil; callers should determine that
	// Info.Name is the basename, and thus a regular file.
	return []FileInfo{{Length: i.Length}}
}
// InfoEx is the info dictionary with its hash and raw bytes exposed, as
// these are important to Bittorrent.
type InfoEx struct {
	Info
	Hash  []byte // SHA-1 of Bytes (the infohash), set by UnmarshalBencode
	Bytes []byte // verbatim bencoded form captured during decoding
}

// Compile-time checks that InfoEx controls its own bencode form.
var (
	_ bencode.Marshaler   = InfoEx{}
	_ bencode.Unmarshaler = &InfoEx{}
)
// UnmarshalBencode keeps a private copy of the raw info dictionary,
// records its SHA-1 (the infohash) and then decodes the structured
// fields into the embedded Info.
func (ie *InfoEx) UnmarshalBencode(data []byte) error {
	ie.Bytes = append([]byte(nil), data...)
	sum := sha1.Sum(ie.Bytes)
	ie.Hash = sum[:]
	return bencode.Unmarshal(data, &ie.Info)
}
// MarshalBencode emits the verbatim bytes captured during decoding when
// they are available, falling back to re-encoding the structured Info.
func (ie InfoEx) MarshalBencode() ([]byte, error) {
	if ie.Bytes == nil {
		return bencode.Marshal(&ie.Info)
	}
	return ie.Bytes, nil
}
// MetaInfo is the top-level dictionary of a .torrent file.
type MetaInfo struct {
	Info         InfoEx      `bencode:"info"`
	Announce     string      `bencode:"announce"` // primary tracker URL
	AnnounceList [][]string  `bencode:"announce-list,omitempty"`
	CreationDate int64       `bencode:"creation date,omitempty"` // unix seconds
	Comment      string      `bencode:"comment,omitempty"`
	CreatedBy    string      `bencode:"created by,omitempty"`
	Encoding     string      `bencode:"encoding,omitempty"`
	URLList      interface{} `bencode:"url-list,omitempty"` // web seeds: a string or a list of strings
}

46
metainfo/metainfo_test.go Normal file
View File

@ -0,0 +1,46 @@
package metainfo
import (
"bytes"
"path"
"testing"
"github.com/anacrolix/libtorgo/bencode"
)
// test_file loads a torrent file, logs its contents, and verifies that
// re-encoding the info dictionary reproduces the exact bytes captured
// during decoding.
func test_file(t *testing.T, filename string) {
	mi, err := LoadFromFile(filename)
	if err != nil {
		t.Fatal(err)
	}
	// Classify via IsDir: a single-file torrent has an empty Files list
	// and carries its size in Length (the old len(Files) == 1 check
	// mislabeled torrents, since single-file infos never populate Files).
	if mi.Info.IsDir() {
		t.Logf("Multiple files: %s\n", mi.Info.Name)
		for _, f := range mi.Info.Files {
			t.Logf(" - %s (length: %d)\n", path.Join(f.Path...), f.Length)
		}
	} else {
		t.Logf("Single file: %s (length: %d)\n", mi.Info.Name, mi.Info.Length)
	}
	for _, group := range mi.AnnounceList {
		for _, tracker := range group {
			t.Logf("Tracker: %s\n", tracker)
		}
	}
	// for _, url := range mi.WebSeedURLs {
	// 	t.Logf("URL: %s\n", url)
	// }
	b, err := bencode.Marshal(mi.Info)
	// A marshalling failure must not be silently ignored: comparing a
	// nil slice against the captured bytes would give a confusing report.
	if err != nil {
		t.Fatal(err)
	}
	if !bytes.Equal(b, mi.Info.Bytes) {
		t.Logf("\n%q\n%q", b[len(b)-20:], mi.Info.Bytes[len(mi.Info.Bytes)-20:])
		t.Fatal("encoded and decoded bytes don't match")
	}
}
// TestFile exercises the decode/encode round-trip on the sample
// torrents shipped under _testdata.
func TestFile(t *testing.T) {
	for _, name := range []string{
		"_testdata/archlinux-2011.08.19-netinstall-i686.iso.torrent",
		"_testdata/continuum.torrent",
		"_testdata/23516C72685E8DB0C8F15553382A927F185C4F01.torrent",
	} {
		test_file(t, name)
	}
}