/**
* @packageDocumentation
*
* @example
*
* Let's create a little directory to import:
*
* ```console
* > cd /tmp
* > mkdir foo
* > echo 'hello' > foo/bar
* > echo 'world' > foo/quux
* ```
*
* And write the importing logic:
*
* ```js
* import { importer } from 'ipfs-unixfs-importer'
* import { MemoryBlockstore } from 'blockstore-core/memory'
* import * as fs from 'node:fs'
*
* // Where the blocks will be stored
* const blockstore = new MemoryBlockstore()
*
* // Import path /tmp/foo/
* const source = [{
* path: '/tmp/foo/bar',
* content: fs.createReadStream('/tmp/foo/bar')
* }, {
* path: '/tmp/foo/quux',
* content: fs.createReadStream('/tmp/foo/quux')
* }]
*
* for await (const entry of importer(source, blockstore)) {
* console.info(entry)
* }
* ```
*
* When run, metadata for each DAGNode in the created tree is printed, ending with the root:
*
* ```js
* {
* cid: CID, // see https://github.com/multiformats/js-cid
* path: 'tmp/foo/bar',
* unixfs: UnixFS // see https://github.com/ipfs/js-ipfs-unixfs
* }
* {
* cid: CID, // see https://github.com/multiformats/js-cid
* path: 'tmp/foo/quux',
* unixfs: UnixFS // see https://github.com/ipfs/js-ipfs-unixfs
* }
* {
* cid: CID, // see https://github.com/multiformats/js-cid
* path: 'tmp/foo',
* unixfs: UnixFS // see https://github.com/ipfs/js-ipfs-unixfs
* }
* {
* cid: CID, // see https://github.com/multiformats/js-cid
* path: 'tmp',
* unixfs: UnixFS // see https://github.com/ipfs/js-ipfs-unixfs
* }
* ```
*/
import first from 'it-first'
import parallelBatch from 'it-parallel-batch'
import { fixedSize } from './chunker/fixed-size.js'
import { type BufferImportProgressEvents, defaultBufferImporter } from './dag-builder/buffer-importer.js'
import { type DAGBuilder, type DagBuilderProgressEvents, defaultDagBuilder } from './dag-builder/index.js'
import { type ChunkValidator, defaultChunkValidator } from './dag-builder/validate-chunks.js'
import { InvalidParametersError } from './errors.js'
import { balanced, type FileLayout } from './layout/index.js'
import { defaultTreeBuilder } from './tree-builder.js'
import type { Chunker } from './chunker/index.js'
import type { ReducerProgressEvents } from './dag-builder/file.js'
import type { Blockstore } from 'interface-blockstore'
import type { AwaitIterable } from 'interface-store'
import type { UnixFS, Mtime } from 'ipfs-unixfs'
import type { CID, Version as CIDVersion } from 'multiformats/cid'
import type { ProgressOptions } from 'progress-events'
export * from './errors.js'
export type ByteStream = AwaitIterable<Uint8Array>
export type ImportContent = ByteStream | Uint8Array
export type WritableStorage = Pick<Blockstore, 'put'>
export interface FileCandidate<T extends ImportContent = ImportContent> {
path?: string
content: T
mtime?: Mtime
mode?: number
}
export interface DirectoryCandidate {
path: string
mtime?: Mtime
mode?: number
}
export type ImportCandidate = FileCandidate | DirectoryCandidate
export interface File {
content: AsyncIterable<Uint8Array>
path?: string
mtime?: Mtime
mode?: number
originalPath?: string
}
export interface Directory {
path?: string
mtime?: Mtime
mode?: number
originalPath?: string
}
export interface ImportResult {
cid: CID
size: bigint
path?: string
unixfs?: UnixFS
}
export interface MultipleBlockImportResult extends ImportResult {
originalPath?: string
}
export interface SingleBlockImportResult extends ImportResult {
single: true
originalPath?: string
block: Uint8Array
}
export type InProgressImportResult = SingleBlockImportResult | MultipleBlockImportResult
export interface BufferImporterResult extends ImportResult {
block: Uint8Array
}
export interface HamtHashFn { (value: Uint8Array): Promise<Uint8Array> }
export interface TreeBuilder { (source: AsyncIterable<InProgressImportResult>, blockstore: WritableStorage): AsyncIterable<ImportResult> }
export interface BufferImporter { (file: File, blockstore: WritableStorage): AsyncIterable<() => Promise<BufferImporterResult>> }
export type ImporterProgressEvents =
BufferImportProgressEvents |
DagBuilderProgressEvents |
ReducerProgressEvents
/**
* Options to control the importer's behaviour
*/
export interface ImporterOptions extends ProgressOptions<ImporterProgressEvents> {
/**
* When a file would span multiple DAGNodes, if this is true the leaf nodes
* will not be wrapped in `UnixFS` protobufs and will instead contain the
* raw file bytes. Default: true
*/
rawLeaves?: boolean
/**
* If the file being imported is small enough to fit into one DAGNode, store
* the file data in the root node along with the UnixFS metadata instead of
* in a leaf node which would then require additional I/O to load. Default: true
*/
reduceSingleLeafToSelf?: boolean
/**
* What type of UnixFS node leaves should be - can be `'file'` or `'raw'`
* (ignored when `rawLeaves` is `true`).
*
* This option exists to simulate kubo's trickle DAG, which uses a combination
* of `'raw'` UnixFS leaves and `reduceSingleLeafToSelf: false`.
*
* For modern code the `rawLeaves: true` option should be used instead so leaves
* are plain Uint8Arrays without a UnixFS/Protobuf wrapper.
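*
* @example
*
* A hedged sketch of options approximating kubo's trickle importer, per the
* note above - this assumes `trickle` is exported from the
* `ipfs-unixfs-importer/layout` subpath:
*
* ```typescript
* import { importer } from 'ipfs-unixfs-importer'
* import { trickle } from 'ipfs-unixfs-importer/layout'
* import { MemoryBlockstore } from 'blockstore-core'
*
* const blockstore = new MemoryBlockstore()
*
* for await (const entry of importer([{
*   content: Uint8Array.from([0, 1, 2, 3, 4])
* }], blockstore, {
*   // combination described above - 'raw' UnixFS leaves without
*   // reducing a single leaf to the root
*   layout: trickle(),
*   rawLeaves: false,
*   leafType: 'raw',
*   reduceSingleLeafToSelf: false
* })) {
*   console.info(entry)
* }
* ```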
*/
leafType?: 'file' | 'raw'
/**
* The CID version to use when storing the data. Default: 1
*/
cidVersion?: CIDVersion
/**
* If a directory's serialized node is larger than this it may be converted
* to a HAMT-sharded directory. Default: 256KiB
*/
shardSplitThresholdBytes?: number
/**
* The number of bits of the hash digest used at each level of sharding to
* derive the child index. 2**shardFanoutBits dictates the maximum number of
* children for any shard in the HAMT. Default: 8
*/
shardFanoutBits?: number
/**
* How many files to import concurrently. For large numbers of small files this
* should be high (e.g. 50). Default: 50
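*
* @example
*
* A minimal sketch of raising the concurrency for many small files - the
* value here is illustrative, not a recommendation:
*
* ```typescript
* import { importer } from 'ipfs-unixfs-importer'
* import { MemoryBlockstore } from 'blockstore-core'
*
* const blockstore = new MemoryBlockstore()
* const files = [{
*   path: 'a.txt',
*   content: Uint8Array.from([0])
* }, {
*   path: 'b.txt',
*   content: Uint8Array.from([1])
* }]
*
* for await (const entry of importer(files, blockstore, {
*   // import up to 100 files at once
*   fileImportConcurrency: 100
* })) {
*   console.info(entry)
* }
* ```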
*/
fileImportConcurrency?: number
/**
* How many blocks to hash and write to the block store concurrently. For small
* numbers of large files this should be high (e.g. 50). Default: 10
*/
blockWriteConcurrency?: number
/**
* If true, all imported files and folders will be contained in a directory that
* will correspond to the CID of the final entry yielded. Default: false
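*
* @example
*
* A minimal sketch: with `wrapWithDirectory: true` the final entry yielded
* is the wrapping directory rather than the file itself:
*
* ```typescript
* import { importer } from 'ipfs-unixfs-importer'
* import { MemoryBlockstore } from 'blockstore-core'
*
* const blockstore = new MemoryBlockstore()
*
* for await (const entry of importer([{
*   path: 'foo.txt',
*   content: Uint8Array.from([0, 1, 2, 3, 4])
* }], blockstore, {
*   wrapWithDirectory: true
* })) {
*   // yields an entry for foo.txt, then one for the wrapping directory
*   console.info(entry.path, entry.cid.toString())
* }
* ```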
*/
wrapWithDirectory?: boolean
/**
* The chunking strategy. See [./src/chunker/index.ts](./src/chunker/index.ts)
* for available chunkers. Default: fixedSize
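*
* @example
*
* A sketch using a custom fixed chunk size - this assumes `fixedSize` is
* exported from the `ipfs-unixfs-importer/chunker` subpath and accepts a
* `chunkSize` option:
*
* ```typescript
* import { importer } from 'ipfs-unixfs-importer'
* import { fixedSize } from 'ipfs-unixfs-importer/chunker'
* import { MemoryBlockstore } from 'blockstore-core'
*
* const blockstore = new MemoryBlockstore()
*
* for await (const entry of importer([{
*   content: new Uint8Array(1024 * 1024)
* }], blockstore, {
*   // split file content into 1 MiB chunks instead of the default 256 KiB
*   chunker: fixedSize({ chunkSize: 1024 * 1024 })
* })) {
*   console.info(entry)
* }
* ```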
*/
chunker?: Chunker
/**
* How the DAGs that represent files are created. See
* [./src/layout/index.ts](./src/layout/index.ts) for available layouts. Default: balanced
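*
* @example
*
* A sketch limiting the width of the balanced layout - this assumes
* `balanced` is exported from the `ipfs-unixfs-importer/layout` subpath and
* accepts a `maxChildrenPerNode` option:
*
* ```typescript
* import { importer } from 'ipfs-unixfs-importer'
* import { balanced } from 'ipfs-unixfs-importer/layout'
* import { MemoryBlockstore } from 'blockstore-core'
*
* const blockstore = new MemoryBlockstore()
*
* for await (const entry of importer([{
*   content: new Uint8Array(10 * 1024 * 1024)
* }], blockstore, {
*   // allow fewer links per intermediate DAG node than the default
*   layout: balanced({ maxChildrenPerNode: 16 })
* })) {
*   console.info(entry)
* }
* ```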
*/
layout?: FileLayout
/**
* This option can be used to override the importer internals.
*
* This function should read `{ path, content }` entries from `source` and turn them
* into DAGs.
*
* It should yield a `function` that returns a `Promise` that resolves to
* `{ cid, path, unixfs, node }` where `cid` is a `CID`, `path` is a string, `unixfs`
* is a UnixFS entry and `node` is a `DAGNode`.
*
* Values will be pulled from this generator in parallel - the amount of parallelisation
* is controlled by the `fileImportConcurrency` option (default: 50).
*/
dagBuilder?: DAGBuilder
/**
* This option can be used to override the importer internals.
*
* This function should read `{ cid, path, unixfs, node }` entries from `source` and
* place them in a directory structure.
*
* It should yield objects with the properties `{ cid, path, unixfs, size }` where
* `cid` is a `CID`, `path` is a string, `unixfs` is a UnixFS entry and `size` is a `bigint`.
*/
treeBuilder?: TreeBuilder
/**
* This option can be used to override the importer internals.
*
* This function should read `Uint8Array`s from `source` and persist them using
* `blockstore.put` or similar.
*
* `entry` is the `{ path, content }` entry, where `entry.content` is an async
* generator that yields `Uint8Array`s.
*
* It should yield functions that return a `Promise` that resolves to an object
* with the properties `{ cid, unixfs, size }` where `cid` is a `CID`, `unixfs`
* is a UnixFS entry and `size` is a `Number` representing the serialized size
* of the IPLD node that holds the buffer data.
*
* Values will be pulled from this generator in parallel - the amount of
* parallelisation is controlled by the `blockWriteConcurrency` option (default: 10).
*/
bufferImporter?: BufferImporter
/**
* This option can be used to override the importer internals.
*
* This function takes input from the `content` field of imported entries.
* It should yield `Uint8Array`s constructed from the `source`, throwing an
* `Error` if it cannot transform the input.
*/
chunkValidator?: ChunkValidator
}
export type ImportCandidateStream = AsyncIterable<FileCandidate | DirectoryCandidate> | Iterable<FileCandidate | DirectoryCandidate>
/**
* The importer creates UnixFS DAGs and stores the blocks that make
* them up in the passed blockstore.
*
* @example
*
* ```typescript
* import { importer } from 'ipfs-unixfs-importer'
* import { MemoryBlockstore } from 'blockstore-core'
*
* // store blocks in memory, other blockstores are available
* const blockstore = new MemoryBlockstore()
*
* const input = [{
* path: './foo.txt',
* content: Uint8Array.from([0, 1, 2, 3, 4])
* }, {
* path: './bar.txt',
* content: Uint8Array.from([0, 1, 2, 3, 4])
* }]
*
* for await (const entry of importer(input, blockstore)) {
* console.info(entry)
* // { cid: CID(), ... }
* }
* ```
*/
export async function * importer (source: ImportCandidateStream, blockstore: WritableStorage, options: ImporterOptions = {}): AsyncGenerator<ImportResult, void, unknown> {
let candidates: AsyncIterable<FileCandidate | DirectoryCandidate> | Iterable<FileCandidate | DirectoryCandidate>
if (Symbol.asyncIterator in source || Symbol.iterator in source) {
candidates = source
} else {
candidates = [source]
}
const wrapWithDirectory = options.wrapWithDirectory ?? false
const shardSplitThresholdBytes = options.shardSplitThresholdBytes ?? 262144
const shardFanoutBits = options.shardFanoutBits ?? 8
const cidVersion = options.cidVersion ?? 1
const rawLeaves = options.rawLeaves ?? true
const leafType = options.leafType ?? 'file'
const fileImportConcurrency = options.fileImportConcurrency ?? 50
const blockWriteConcurrency = options.blockWriteConcurrency ?? 10
const reduceSingleLeafToSelf = options.reduceSingleLeafToSelf ?? true
const chunker = options.chunker ?? fixedSize()
const chunkValidator = options.chunkValidator ?? defaultChunkValidator()
const buildDag: DAGBuilder = options.dagBuilder ?? defaultDagBuilder({
chunker,
chunkValidator,
wrapWithDirectory,
layout: options.layout ?? balanced(),
bufferImporter: options.bufferImporter ?? defaultBufferImporter({
cidVersion,
rawLeaves,
leafType,
onProgress: options.onProgress
}),
blockWriteConcurrency,
reduceSingleLeafToSelf,
cidVersion,
onProgress: options.onProgress
})
const buildTree: TreeBuilder = options.treeBuilder ?? defaultTreeBuilder({
wrapWithDirectory,
shardSplitThresholdBytes,
shardFanoutBits,
cidVersion,
onProgress: options.onProgress
})
for await (const entry of buildTree(parallelBatch(buildDag(candidates, blockstore), fileImportConcurrency), blockstore)) {
yield {
cid: entry.cid,
path: entry.path,
unixfs: entry.unixfs,
size: entry.size
}
}
}
/**
* `importFile` is similar to `importer` except it accepts a single
* `FileCandidate` and returns a promise of a single `ImportResult`
* instead of a stream of results.
*
* @example
*
* ```typescript
* import { importFile, type FileCandidate } from 'ipfs-unixfs-importer'
* import { MemoryBlockstore } from 'blockstore-core'
*
* // store blocks in memory, other blockstores are available
* const blockstore = new MemoryBlockstore()
*
* const input: FileCandidate = {
* path: './foo.txt',
* content: Uint8Array.from([0, 1, 2, 3, 4])
* }
*
* const entry = await importFile(input, blockstore)
* ```
*/
export async function importFile (content: FileCandidate, blockstore: WritableStorage, options: ImporterOptions = {}): Promise<ImportResult> {
const result = await first(importer([content], blockstore, options))
if (result == null) {
throw new InvalidParametersError('Nothing imported')
}
return result
}
/**
* `importDirectory` is similar to `importer` except it accepts a single
* `DirectoryCandidate` and returns a promise of a single `ImportResult`
* instead of a stream of results.
*
* @example
*
* ```typescript
* import { importDirectory, type DirectoryCandidate } from 'ipfs-unixfs-importer'
* import { MemoryBlockstore } from 'blockstore-core'
*
* // store blocks in memory, other blockstores are available
* const blockstore = new MemoryBlockstore()
*
* const input: DirectoryCandidate = {
* path: './foo'
* }
*
* const entry = await importDirectory(input, blockstore)
* ```
*/
export async function importDirectory (content: DirectoryCandidate, blockstore: WritableStorage, options: ImporterOptions = {}): Promise<ImportResult> {
const result = await first(importer([content], blockstore, options))
if (result == null) {
throw new InvalidParametersError('Nothing imported')
}
return result
}
/**
* `importBytes` accepts a single Uint8Array and returns a promise
* of a single `ImportResult`.
*
* @example
*
* ```typescript
* import { importBytes } from 'ipfs-unixfs-importer'
* import { MemoryBlockstore } from 'blockstore-core'
*
* // store blocks in memory, other blockstores are available
* const blockstore = new MemoryBlockstore()
*
* const input = Uint8Array.from([0, 1, 2, 3, 4])
*
* const entry = await importBytes(input, blockstore)
* ```
*/
export async function importBytes (buf: ImportContent, blockstore: WritableStorage, options: ImporterOptions = {}): Promise<ImportResult> {
return importFile({
content: buf
}, blockstore, options)
}
/**
* `importByteStream` accepts a single stream of Uint8Arrays and
* returns a promise of a single `ImportResult`.
*
* @example
*
* ```typescript
* import { importByteStream } from 'ipfs-unixfs-importer'
* import { MemoryBlockstore } from 'blockstore-core'
*
* // store blocks in memory, other blockstores are available
* const blockstore = new MemoryBlockstore()
*
* const input = [
* Uint8Array.from([0, 1, 2, 3, 4]),
* Uint8Array.from([5, 6, 7, 8, 9])
* ]
*
* const entry = await importByteStream(input, blockstore)
* ```
*/
export async function importByteStream (bufs: ByteStream, blockstore: WritableStorage, options: ImporterOptions = {}): Promise<ImportResult> {
return importFile({
content: bufs
}, blockstore, options)
}