@@ -1,9 +1,16 @@
 'use strict'
 
 const bs58 = require('bs58')
+const multipart = require('ipfs-multipart')
 const debug = require('debug')
+const tar = require('tar-stream')
 const log = debug('http-api:files')
 log.error = debug('http-api:files:error')
+const pull = require('pull-stream')
+const toStream = require('pull-stream-to-stream')
+const toPull = require('stream-to-pull-stream')
+const pushable = require('pull-pushable')
+const EOL = require('os').EOL
 
 exports = module.exports
 
@@ -33,18 +40,152 @@ exports.cat = {
   // main route handler which is called after the above `parseArgs`, but only if the args were valid
   handler: (request, reply) => {
     const key = request.pre.args.key
+    const ipfs = request.server.app.ipfs
 
-    request.server.app.ipfs.files.cat(key, (err, stream) => {
+    ipfs.files.cat(key, (err, stream) => {
       if (err) {
         log.error(err)
         return reply({
           Message: 'Failed to cat file: ' + err,
           Code: 0
         }).code(500)
       }
-      stream.on('data', (data) => {
-        return reply(data)
+
+      // hapi is not very clever and throws if no
+      // - _read method
+      // - _readableState object
+      // are there :(
+      stream._read = () => {}
+      stream._readableState = {}
+      return reply(stream).header('X-Stream-Output', '1')
+    })
+  }
+}
+
+exports.get = {
+  // uses common parseKey method that returns a `key`
+  parseArgs: exports.parseKey,
+
+  // main route handler which is called after the above `parseArgs`, but only if the args were valid
+  handler: (request, reply) => {
+    const key = request.pre.args.key
+    const ipfs = request.server.app.ipfs
+    const pack = tar.pack()
+
+    ipfs.files.getPull(key, (err, stream) => {
+      if (err) {
+        log.error(err)
+
+        reply({
+          Message: 'Failed to get file: ' + err,
+          Code: 0
+        }).code(500)
+        return
+      }
+
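+      // turn each exported entry into a tar entry: directories get a
+      // header only, file content is piped into the archive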
+      pull(
+        stream,
+        pull.asyncMap((file, cb) => {
+          const header = {name: file.path}
+
+          if (!file.content) {
+            header.type = 'directory'
+            pack.entry(header)
+            cb()
+          } else {
+            header.size = file.size
+            toStream.source(file.content)
+              .pipe(pack.entry(header, cb))
+          }
+        }),
+        pull.onEnd((err) => {
+          if (err) {
+            log.error(err)
+
+            reply({
+              Message: 'Failed to get file: ' + err,
+              Code: 0
+            }).code(500)
+            return
+          }
+
+          pack.finalize()
+        })
+      )
+
+      // the reply must read the tar stream,
+      // to pull values through
+      reply(pack).header('X-Stream-Output', '1')
+    })
+  }
+}
+
+exports.add = {
+  handler: (request, reply) => {
+    if (!request.payload) {
+      return reply('Array, Buffer, or String is required.').code(400).takeover()
+    }
+
+    const ipfs = request.server.app.ipfs
+    // TODO: make pull-multipart
+    const parser = multipart.reqParser(request.payload)
+    let filesParsed = false
+
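+    // pushable pull-stream source; each parsed multipart part is pushed
+    // into it below and consumed by the add pipeline at the bottom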
+    const fileAdder = pushable()
+
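+    // every uploaded file becomes a { path, content } pair, with its
+    // Node stream converted to a pull-stream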
+    parser.on('file', (fileName, fileStream) => {
+      const filePair = {
+        path: fileName,
+        content: toPull(fileStream)
+      }
+      filesParsed = true
+      fileAdder.push(filePair)
+    })
+
+    parser.on('directory', (directory) => {
+      fileAdder.push({
+        path: directory,
+        content: ''
       })
     })
+
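+    // once the whole request has been parsed, reply 400 if no file parts
+    // were seen, otherwise close the pushable so the pipeline below can finish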
+    parser.on('end', () => {
+      if (!filesParsed) {
+        return reply("File argument 'data' is required.")
+          .code(400).takeover()
+      }
+      fileAdder.end()
+    })
+
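+    // run the parsed entries through the add stream and reply with one
+    // JSON object per imported file, newline-delimited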
+    pull(
+      fileAdder,
+      ipfs.files.createAddPullStream(),
+      pull.map((file) => {
+        return {
+          Name: file.path ? file.path : file.hash,
+          Hash: file.hash
+        }
+      }),
+      pull.map((file) => JSON.stringify(file) + EOL),
+      pull.collect((err, files) => {
+        if (err) {
+          return reply({
+            Message: 'Failed to add files: ' + err,
+            Code: 0
+          }).code(500)
+        }
+
+        if (files.length === 0 && filesParsed) {
+          return reply({
+            Message: 'Failed to add files.',
+            Code: 0
+          }).code(500)
+        }
+
+        reply(files.join(''))
+          .header('x-chunked-output', '1')
+          .header('content-type', 'application/json')
+      })
+    )
   }
 }