Add File.Cancel

This commit is contained in:
parent b4f04ddc61
commit 63c73e18b3

file.go | 14 ++++++++++++++
@@ -78,3 +78,17 @@ func (f *File) State() (ret []FilePieceState) {
 func (f *File) Download() {
 	f.t.DownloadPieces(f.t.torrent.byteRegionPieces(f.offset, f.length))
 }
+
+func byteRegionExclusivePieces(off, size, pieceSize int64) (begin, end int) {
+	begin = int((off + pieceSize - 1) / pieceSize)
+	end = int((off + size) / pieceSize)
+	return
+}
+
+func (f *File) exclusivePieces() (begin, end int) {
+	return byteRegionExclusivePieces(f.offset, f.length, int64(f.t.torrent.usualPieceSize()))
+}
+
+func (f *File) Cancel() {
+	f.t.CancelPieces(f.exclusivePieces())
+}
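Cancel only unpends pieces that lie entirely within the file's byte region, so pieces straddling a boundary with a neighbouring file stay pending. A minimal usage sketch, not part of this commit: the client setup and magnet URI are placeholders, and AddMagnet, GotInfo, and Files are assumed from the library's public API.

package main

import "github.com/anacrolix/torrent"

func main() {
	// Hedged sketch: cancel one file of a multi-file torrent while
	// keeping the rest downloading.
	cl, _ := torrent.NewClient(nil)
	defer cl.Close()
	t, _ := cl.AddMagnet("magnet:?xt=urn:btih:...")
	<-t.GotInfo()
	for _, f := range t.Files() {
		f.Download() // pend every piece touching each file
	}
	t.Files()[0].Cancel() // unpend pieces exclusive to the first file
}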
new file | 22 ++++++++++++++++++++++

@@ -0,0 +1,22 @@
+package torrent
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestFileExclusivePieces(t *testing.T) {
+	for _, _case := range []struct {
+		off, size, pieceSize int64
+		begin, end           int
+	}{
+		{0, 2, 2, 0, 1},
+		{1, 2, 2, 1, 1},
+		{1, 4, 2, 1, 2},
+	} {
+		begin, end := byteRegionExclusivePieces(_case.off, _case.size, _case.pieceSize)
+		assert.EqualValues(t, _case.begin, begin)
+		assert.EqualValues(t, _case.end, end)
+	}
+}
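These cases pin down the "exclusive" semantics. With off=1, size=2, pieceSize=2 the region spans bytes [1, 3), clipping piece 0 (bytes [0, 2)) and piece 1 (bytes [2, 4)) without fully covering either, so begin = ceil(1/2) = 1 and end = (1+2)/2 = 1 give the empty range [1, 1). Stretching the region to size=4 (bytes [1, 5)) fully covers piece 1 only, giving [1, 2).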
piece.go | 9 +++++++++

@@ -52,6 +52,15 @@ func (p *piece) pendingChunk(cs chunkSpec, chunkSize pp.Integer) bool {
 	return !p.DirtyChunks[ci]
 }
 
+func (p *piece) hasDirtyChunks() bool {
+	for _, dirty := range p.DirtyChunks {
+		if dirty {
+			return true
+		}
+	}
+	return false
+}
+
 func (p *piece) numDirtyChunks() (ret int) {
 	for _, dirty := range p.DirtyChunks {
 		if dirty {
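hasDirtyChunks is the short-circuiting counterpart of the existing numDirtyChunks: it returns at the first dirty chunk instead of scanning the whole slice. A sketch of the equivalence; hasDirtyChunksSlow is a hypothetical name, not in the diff.

// Same answer, but always walks every chunk; the new method returns early.
func (p *piece) hasDirtyChunksSlow() bool {
	return p.numDirtyChunks() > 0
}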
t.go | 10 +++++++---

@@ -140,7 +140,11 @@ func (t Torrent) deleteReader(r *Reader) {
 func (t Torrent) DownloadPieces(begin, end int) {
 	t.cl.mu.Lock()
 	defer t.cl.mu.Unlock()
-	for i := begin; i < end; i++ {
-		t.torrent.pendPiece(i, t.cl)
-	}
+	t.torrent.pendPieceRange(begin, end)
 }
+
+func (t Torrent) CancelPieces(begin, end int) {
+	t.cl.mu.Lock()
+	defer t.cl.mu.Unlock()
+	t.torrent.unpendPieceRange(begin, end)
+}
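Both exported methods take a half-open [begin, end) piece range and hold the client lock for the duration. A hedged caller sketch, where t is assumed to be a Torrent obtained from the client:

t.DownloadPieces(0, 10) // pend pieces 0 through 9
t.CancelPieces(5, 10)   // withdraw 5 through 9; 0 through 4 stay pending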
torrent.go | 42 +++++++++++++++++++++++++++++++++++++---

@@ -508,9 +508,14 @@ func (t *torrent) bytesLeft() (left int64) {
 	return
 }
 
-func (t *torrent) piecePartiallyDownloaded(index int) bool {
-	pendingBytes := t.pieceNumPendingBytes(index)
-	return pendingBytes != 0 && pendingBytes != t.pieceLength(index)
+func (t *torrent) piecePartiallyDownloaded(piece int) bool {
+	if t.pieceComplete(piece) {
+		return false
+	}
+	if t.pieceAllDirty(piece) {
+		return true
+	}
+	return t.Pieces[piece].hasDirtyChunks()
 }
 
 func numChunksForPiece(chunkSize int, pieceSize int) int {
@@ -976,6 +981,37 @@ func (t *torrent) pendPiece(piece int, cl *Client) {
 	t.piecePriorityChanged(piece)
 }
 
+func (t *torrent) getCompletedPieces() (ret bitmap.Bitmap) {
+	for i := range iter.N(t.numPieces()) {
+		if t.pieceComplete(i) {
+			ret.Add(i)
+		}
+	}
+	return
+}
+
+func (t *torrent) pendPieces(pend *bitmap.Bitmap) {
+	t.pendingPieces.Union(pend)
+	t.updatePiecePriorities()
+}
+
+func (t *torrent) unpendPieces(unpend *bitmap.Bitmap) {
+	t.pendingPieces.Sub(unpend)
+	t.updatePiecePriorities()
+}
+
+func (t *torrent) pendPieceRange(begin, end int) {
+	var bm bitmap.Bitmap
+	bm.AddRange(begin, end)
+	t.pendPieces(&bm)
+}
+
+func (t *torrent) unpendPieceRange(begin, end int) {
+	var bm bitmap.Bitmap
+	bm.AddRange(begin, end)
+	t.unpendPieces(&bm)
+}
+
 func (t *torrent) connRequestPiecePendingChunks(c *connection, piece int) (more bool) {
 	if !c.PeerHasPiece(piece) {
 		return true
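The range helpers all funnel through a single bitmap union or subtraction followed by one updatePiecePriorities call, replacing the per-piece pendPiece loop DownloadPieces used before. A semantics sketch using only the bitmap calls that appear in the diff; the import path is assumed to be the missinggo bitmap package the identifiers suggest.

import "github.com/anacrolix/missinggo/bitmap"

// Mirrors pendPieceRange/unpendPieceRange without the priority update:
// build a bitmap for [begin, end), then union into or subtract from the
// pending set in one operation.
func pendThenUnpend(pending *bitmap.Bitmap, begin, end int) {
	var bm bitmap.Bitmap
	bm.AddRange(begin, end) // e.g. begin=3, end=6 marks pieces 3, 4, 5
	pending.Union(&bm)      // pendPieceRange: pend the whole range at once
	pending.Sub(&bm)        // unpendPieceRange: restore the original set
}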