Mirror of https://github.com/ipfs/kubo.git, synced 2026-02-25 20:37:53 +08:00.
fix tests (circular import)
This commit is contained in:
parent
71ac117546
commit
f0d823c2db
@ -161,8 +161,8 @@ func NewIpfsNode(cfg *config.Config, online bool) (n *IpfsNode, err error) {
|
||||
if err != nil {
|
||||
n.Pinning = pin.NewPinner(n.Datastore, n.DAG)
|
||||
}
|
||||
n.Resolver = &path.Resolver{DAG: n.DAG}
|
||||
|
||||
n.Resolver = &path.Resolver{DAG: n.DAG}
|
||||
|
||||
success = true
|
||||
return n, nil
|
||||
}
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
package io
|
||||
package io_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
@ -7,9 +7,10 @@ import (
|
||||
|
||||
ds "github.com/jbenet/go-ipfs/Godeps/_workspace/src/github.com/jbenet/go-datastore"
|
||||
bs "github.com/jbenet/go-ipfs/blockservice"
|
||||
"github.com/jbenet/go-ipfs/importer"
|
||||
importer "github.com/jbenet/go-ipfs/importer"
|
||||
chunk "github.com/jbenet/go-ipfs/importer/chunk"
|
||||
mdag "github.com/jbenet/go-ipfs/merkledag"
|
||||
dagio "github.com/jbenet/go-ipfs/unixfs/io"
|
||||
)
|
||||
|
||||
type datasource struct {
|
||||
@ -55,7 +56,7 @@ func TestDagWriter(t *testing.T) {
|
||||
t.Fatal(err)
|
||||
}
|
||||
dag := mdag.NewDAGService(bserv)
|
||||
dw := NewDagWriter(dag, &chunk.SizeSplitter{Size: 4096})
|
||||
dw := dagio.NewDagWriter(dag, &chunk.SizeSplitter{Size: 4096})
|
||||
|
||||
nbytes := int64(1024 * 1024 * 2)
|
||||
n, err := io.CopyN(dw, &datasource{}, nbytes)
|
||||
@ -70,7 +71,7 @@ func TestDagWriter(t *testing.T) {
|
||||
dw.Close()
|
||||
|
||||
node := dw.GetNode()
|
||||
read, err := NewDagReader(node, dag)
|
||||
read, err := dagio.NewDagReader(node, dag)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -89,7 +90,7 @@ func TestMassiveWrite(t *testing.T) {
|
||||
t.Fatal(err)
|
||||
}
|
||||
dag := mdag.NewDAGService(bserv)
|
||||
dw := NewDagWriter(dag, &chunk.SizeSplitter{Size: 4096})
|
||||
dw := dagio.NewDagWriter(dag, &chunk.SizeSplitter{Size: 4096})
|
||||
|
||||
nbytes := int64(1024 * 1024 * 1024 * 16)
|
||||
n, err := io.CopyN(dw, &datasource{}, nbytes)
|
||||
@ -114,7 +115,7 @@ func BenchmarkDagWriter(b *testing.B) {
|
||||
nbytes := int64(100000)
|
||||
for i := 0; i < b.N; i++ {
|
||||
b.SetBytes(nbytes)
|
||||
dw := NewDagWriter(dag, &chunk.SizeSplitter{Size: 4096})
|
||||
dw := dagio.NewDagWriter(dag, &chunk.SizeSplitter{Size: 4096})
|
||||
n, err := io.CopyN(dw, &datasource{}, nbytes)
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
@ -138,7 +139,7 @@ func TestAgainstImporter(t *testing.T) {
|
||||
nbytes := int64(1024 * 1024 * 2)
|
||||
|
||||
// DagWriter
|
||||
dw := NewDagWriter(dag, &chunk.SizeSplitter{4096})
|
||||
dw := dagio.NewDagWriter(dag, &chunk.SizeSplitter{4096})
|
||||
n, err := io.CopyN(dw, &datasource{}, nbytes)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
|
||||
Loading…
Reference in New Issue
Block a user