@@ -14,12 +14,17 @@ const CID = require('cids')
 const loadFixture = require('aegir/fixtures')
 const doUntil = require('async/doUntil')
 const waterfall = require('async/waterfall')
+const parallel = require('async/parallel')
 const series = require('async/series')
 const fs = require('fs')
 const path = require('path')
 const push = require('pull-pushable')
 const toPull = require('stream-to-pull-stream')
 const toStream = require('pull-stream-to-stream')
+const {
+  DAGNode,
+  DAGLink
+} = require('ipld-dag-pb')
 
 const unixFSEngine = require('./../src')
 const exporter = unixFSEngine.exporter
@@ -635,6 +640,79 @@ module.exports = (repo) => {
         })
       )
     })
+
+    it('exports file with data on internal and leaf nodes', function (done) {
+      waterfall([
+        (cb) => createAndPersistNode(ipld, 'raw', [0x04, 0x05, 0x06, 0x07], [], cb),
+        (leaf, cb) => createAndPersistNode(ipld, 'file', [0x00, 0x01, 0x02, 0x03], [
+          leaf
+        ], cb),
+        (file, cb) => {
+          pull(
+            exporter(file.multihash, ipld),
+            pull.asyncMap((file, cb) => readFile(file, cb)),
+            pull.through(buffer => {
+              expect(buffer).to.deep.equal(Buffer.from([0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07]))
+            }),
+            pull.collect(cb)
+          )
+        }
+      ], done)
+    })
+
+    it('exports file with data on some internal and leaf nodes', function (done) {
+      // create a file node with three children:
+      // where:
+      // i = internal node without data
+      // d = internal node with data
+      // l = leaf node with data
+      //     i
+      //   / | \
+      //  l  d  i
+      //     |   \
+      //     l    l
+      waterfall([
+        (cb) => {
+          // create leaves
+          parallel([
+            (next) => createAndPersistNode(ipld, 'raw', [0x00, 0x01, 0x02, 0x03], [], next),
+            (next) => createAndPersistNode(ipld, 'raw', [0x08, 0x09, 0x10, 0x11], [], next),
+            (next) => createAndPersistNode(ipld, 'raw', [0x12, 0x13, 0x14, 0x15], [], next)
+          ], cb)
+        },
+        (leaves, cb) => {
+          parallel([
+            (next) => createAndPersistNode(ipld, 'raw', [0x04, 0x05, 0x06, 0x07], [leaves[1]], next),
+            (next) => createAndPersistNode(ipld, 'raw', null, [leaves[2]], next)
+          ], (error, internalNodes) => {
+            if (error) {
+              return cb(error)
+            }
+
+            createAndPersistNode(ipld, 'file', null, [
+              leaves[0],
+              internalNodes[0],
+              internalNodes[1]
+            ], cb)
+          })
+        },
+        (file, cb) => {
+          pull(
+            exporter(file.multihash, ipld),
+            pull.asyncMap((file, cb) => readFile(file, cb)),
+            pull.through(buffer => {
+              expect(buffer).to.deep.equal(
+                Buffer.from([
+                  0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+                  0x08, 0x09, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15
+                ])
+              )
+            }),
+            pull.collect(cb)
+          )
+        }
+      ], done)
+    })
   })
 }
 
@@ -670,3 +748,26 @@ function readFile (file, done) {
     })
   )
 }
+
+function createAndPersistNode (ipld, type, data, children, callback) {
+  const file = new UnixFS(type, data ? Buffer.from(data) : undefined)
+  const links = []
+
+  children.forEach(child => {
+    const leaf = UnixFS.unmarshal(child.data)
+
+    file.addBlockSize(leaf.fileSize())
+
+    links.push(new DAGLink('', child.size, child.multihash))
+  })
+
+  DAGNode.create(file.marshal(), links, (error, node) => {
+    if (error) {
+      return callback(error)
+    }
+
+    ipld.put(node, {
+      cid: new CID(node.multihash)
+    }, (error) => callback(error, node))
+  })
+}