@@ -1,5 +1,4 @@
 import * as dagPb from '@ipld/dag-pb'
-import errCode from 'err-code'
 import { UnixFS } from 'ipfs-unixfs'
 import map from 'it-map'
 import parallel from 'it-parallel'
@@ -8,6 +7,7 @@ import { type Pushable, pushable } from 'it-pushable'
 import * as raw from 'multiformats/codecs/raw'
 import PQueue from 'p-queue'
 import { CustomProgressEvent } from 'progress-events'
+import { NotUnixFSError, OverReadError, UnderReadError } from '../../../errors.js'
 import extractDataFromBlock from '../../../utils/extract-data-from-block.js'
 import validateOffsetAndLength from '../../../utils/validate-offset-and-length.js'
 import type { ExporterOptions, UnixfsV1FileContent, UnixfsV1Resolver, ReadableStorage, ExportProgress, ExportWalk } from '../../../index.js'
@@ -23,15 +23,15 @@ async function walkDAG (blockstore: ReadableStorage, node: dagPb.PBNode | Uint8A
   }
 
   if (node.Data == null) {
-    throw errCode(new Error('no data in PBNode'), 'ERR_NOT_UNIXFS')
+    throw new NotUnixFSError('no data in PBNode')
   }
 
   let file: UnixFS
 
   try {
     file = UnixFS.unmarshal(node.Data)
   } catch (err: any) {
-    throw errCode(err, 'ERR_NOT_UNIXFS')
+    throw new NotUnixFSError(err.message)
   }
 
   // might be a unixfs `raw` node or have data on intermediate nodes
@@ -47,7 +47,7 @@ async function walkDAG (blockstore: ReadableStorage, node: dagPb.PBNode | Uint8A
   const childOps: Array<{ link: dagPb.PBLink, blockStart: bigint }> = []
 
   if (node.Links.length !== file.blockSizes.length) {
-    throw errCode(new Error('Inconsistent block sizes and dag links'), 'ERR_NOT_UNIXFS')
+    throw new NotUnixFSError('Inconsistent block sizes and dag links')
   }
 
   for (let i = 0; i < node.Links.length; i++) {
@@ -98,7 +98,7 @@ async function walkDAG (blockstore: ReadableStorage, node: dagPb.PBNode | Uint8A
             child = block
             break
           default:
-            queue.end(errCode(new Error(`Unsupported codec: ${link.Hash.code}`), 'ERR_NOT_UNIXFS'))
+            queue.end(new NotUnixFSError(`Unsupported codec: ${link.Hash.code}`))
             return
         }
 
@@ -171,7 +171,7 @@ const fileContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth,
 
       if (read > wanted) {
         queue.end()
-        throw errCode(new Error('Read too many bytes - the file size reported by the UnixFS data in the root node may be incorrect'), 'ERR_OVER_READ')
+        throw new OverReadError('Read too many bytes - the file size reported by the UnixFS data in the root node may be incorrect')
       }
 
       if (read === wanted) {
@@ -188,7 +188,7 @@ const fileContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth,
       }
 
       if (read < wanted) {
-        throw errCode(new Error('Traversed entire DAG but did not read enough bytes'), 'ERR_UNDER_READ')
+        throw new UnderReadError('Traversed entire DAG but did not read enough bytes')
      }
     }
 