 'use strict'

-// @ts-ignore
 const dagPb = require('@ipld/dag-pb')
-const { sha256, sha512 } = require('multiformats/hashes/sha2')
 const { CID } = require('multiformats/cid')
 const log = require('debug')('ipfs:mfs:core:utils:add-link')
 const { UnixFS } = require('ipfs-unixfs')
-// @ts-ignore - refactor this to not need deep require
-const DirSharded = require('ipfs-unixfs-importer/src/dir-sharded')
-// @ts-ignore - refactor this to not need deep require
-const defaultImporterOptions = require('ipfs-unixfs-importer/src/options')
+const DirSharded = require('./dir-sharded')
 const {
   updateHamtDirectory,
   recreateHamtLevel,
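This hunk replaces the deep requires into `ipfs-unixfs-importer` internals with a local `./dir-sharded`, and the hash-algorithm lookup (removed further down) moves out of this module. A minimal sketch of how such a lookup could live at the ipfs/unixfs boundary instead; the `getHasher` helper and `HASHERS` table are hypothetical illustrations, not part of this change:

```js
// Hypothetical helper: resolve a multiformats hasher by name at the
// ipfs/unixfs boundary rather than inside add-link itself.
const { sha256, sha512 } = require('multiformats/hashes/sha2')

const HASHERS = {
  'sha2-256': sha256,
  'sha2-512': sha512
}

function getHasher (hashAlg = 'sha2-256') {
  const hasher = HASHERS[hashAlg]

  if (!hasher) {
    throw new Error(`Unsupported hash algorithm: ${hashAlg}`)
  }

  return hasher
}
```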
@@ -223,6 +218,11 @@ const addToShardedDirectory = async (context, options) => { |
     shard, path
   } = await addFileToShardedDirectory(context, options)
   const result = await last(shard.flush(context.repo.blocks))
+
+  if (!result) {
+    throw new Error('No result from flushing shard')
+  }
+
   const block = await context.repo.blocks.get(result.cid)
   const node = dagPb.decode(block)

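The new guard is needed because `last` (from `it-last`) resolves to `undefined` when the iterable yields nothing, so `result.cid` cannot be accessed unconditionally. A small standalone illustration; the `empty` generator is only a stand-in for a flush that produced no blocks:

```js
const last = require('it-last')

// An async generator that yields nothing, standing in for an
// empty flush.
async function * empty () {}

async function demo () {
  const result = await last(empty())
  console.log(result) // undefined - hence the explicit check above
}

demo()
```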
@@ -269,44 +269,24 @@ const addFileToShardedDirectory = async (context, options) => {
   // start at the root bucket and descend, loading nodes as we go
   const rootBucket = await recreateInitialHamtLevel(options.parent.Links)
   const node = UnixFS.unmarshal(options.parent.Data)
-  const importerOptions = defaultImporterOptions()
-
-  // NOTE vmx 2021-04-01: in ipfs the hash algorithm is a constant in unixfs
-  // it's an implementation. Do the option conversion at the boundary between
-  // ipfs and unixfs.
-  let hasher
-  switch (options.hashAlg) {
-    case 'sha2-256':
-      hasher = sha256
-      break
-    case 'sha2-512':
-      hasher = sha512
-      break
-    default:
-      throw new Error(`TODO vmx 2021-03-31: Proper error message for unsupported hash algorithms like ${options.hashAlg}`)
-  }

   const shard = new DirSharded({
     root: true,
     dir: true,
-    parent: null,
-    parentKey: null,
+    parent: undefined,
+    parentKey: undefined,
     path: '',
     dirty: true,
     flat: false,
     mode: node.mode
-  }, {
-    hamtHashFn: importerOptions.hamtHashFn,
-    hamtHashCode: importerOptions.hamtHashCode,
-    hamtBucketBits: importerOptions.hamtBucketBits,
-    hasher,
-    ...options
-  })
+  }, options)
   shard._bucket = rootBucket

   if (node.mtime) {
     // update mtime if previously set
-    shard.mtime = new Date()
+    shard.mtime = {
+      secs: Math.round(Date.now() / 1000)
+    }
   }

   // load subshards until the bucket & position no longer changes
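In `ipfs-unixfs`, `mtime` is a `{ secs, nsecs? }` pair (whole seconds since the epoch plus optional fractional nanoseconds), not a JS `Date`, which is what the last change in this hunk corrects. A hedged sketch of a fuller conversion that also preserves sub-second precision; the `toMtime` helper is hypothetical, not part of this change:

```js
// Hypothetical helper: convert a JS Date into the UnixFS mtime shape,
// keeping the millisecond remainder as fractional nanoseconds.
function toMtime (date = new Date()) {
  const ms = date.getTime()

  return {
    secs: Math.floor(ms / 1000),
    nsecs: (ms % 1000) * 1000000
  }
}
```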