diff --git a/README.md b/README.md index b9b747a4..6ec42cb3 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ IPFS unixFS Engine ![](https://img.shields.io/badge/npm-%3E%3D3.0.0-orange.svg?style=flat-square) ![](https://img.shields.io/badge/Node.js-%3E%3D4.0.0-orange.svg?style=flat-square) -> JavaScript implementation of the layout and chunking mechanisms used by IPFS +> JavaScript implementation of the layout and chunking mechanisms used by IPFS to handle Files ## Table of Contents @@ -35,9 +35,12 @@ IPFS unixFS Engine ## Usage -### Example Importer +### Importer + +#### Importer example Let's create a little directory to import: + ```sh > cd /tmp > mkdir foo @@ -46,6 +49,7 @@ Let's create a little directory to import: ``` And write the importing logic: + ```js const Importer = require('ipfs-unixfs-engine').Importer const filesAddStream = new Importer(, size: 39243, path: '/tmp/foo/bar' } @@ -93,15 +98,15 @@ When run, the stat of DAG Node is outputted for each file on data event until th ``` -### Importer API +#### Importer API ```js const Importer = require('ipfs-unixfs-engine').Importer ``` -#### const add = new Importer(dag) +#### const import = new Importer(dag [, options]) -The importer is a object Transform stream that accepts objects of the form +The `import` object is a duplex pull stream that takes objects of the form: ```js { @@ -110,50 +115,50 @@ The importer is a object Transform stream that accepts objects of the form } ``` -The stream will output IPFS DAG Node stats for the nodes as they are added to -the DAG Service. When stats on a node are emitted they are guaranteed to have -been written into the [DAG Service][]'s storage mechanism. +`import` will outoyt file info objects as files get stored in IPFS. When stats on a node are emitted they are guaranteed to have been written. -The input's file paths and directory structure will be preserved in the DAG -Nodes. +`dag` is an instance of the [`IPLD Resolver`](https://github.com/ipld/js-ipld-resolver) or the [`js-ipfs` `dag api`](https://github.com/ipfs/interface-ipfs-core/tree/master/API/dag) -### Importer options +The input's file paths and directory structure will be preserved in the [`dag-pb`](https://github.com/ipld/js-ipld-dag-pb) created nodes. -In the second argument of the importer constructor you can specify the following options: +`options` is an JavaScript option that might include the following keys: -* `wrap` (boolean, defaults to false): if true, a wrapping node will be created -* `shardSplitThreshold` (positive integer, defaults to 1000): the number of directory entries above which we decide to use a sharding directory builder (instead of the default flat one) -* `chunker` (string, defaults to `"fixed"`): the chunking strategy. Now only supports `"fixed"` -* `chunkerOptions` (object, optional): the options for the chunker. Defaults to an object with the following properties: - * `maxChunkSize` (positive integer, defaults to `262144`): the maximum chunk size for the `fixed` chunker. -* `strategy` (string, defaults to `"balanced"`): the DAG builder strategy name. Supports: - * `flat`: flat list of chunks - * `balanced`: builds a balanced tree - * `trickle`: builds [a trickle tree](https://github.com/ipfs/specs/pull/57#issuecomment-265205384) -* `maxChildrenPerNode` (positive integer, defaults to `174`): the maximum children per node for the `balanced` and `trickle` DAG builder strategies -* `layerRepeat` (positive integer, defaults to 4): (only applicable to the `trickle` DAG builder strategy). The maximum repetition of parent nodes for each layer of the tree. -* `reduceSingleLeafToSelf` (boolean, defaults to `false`): optimization for, when reducing a set of nodes with one node, reduce it to that node. -* `dirBuilder` (object): the options for the directory builder - * `hamt` (object): the options for the HAMT sharded directory builder - * bits (positive integer, defaults to `5`): the number of bits at each bucket of the HAMT +- `wrap` (boolean, defaults to false): if true, a wrapping node will be created +- `shardSplitThreshold` (positive integer, defaults to 1000): the number of directory entries above which we decide to use a sharding directory builder (instead of the default flat one) +- `chunker` (string, defaults to `"fixed"`): the chunking strategy. Now only supports `"fixed"` +- `chunkerOptions` (object, optional): the options for the chunker. Defaults to an object with the following properties: + - `maxChunkSize` (positive integer, defaults to `262144`): the maximum chunk size for the `fixed` chunker. +- `strategy` (string, defaults to `"balanced"`): the DAG builder strategy name. Supports: + - `flat`: flat list of chunks + - `balanced`: builds a balanced tree + - `trickle`: builds [a trickle tree](https://github.com/ipfs/specs/pull/57#issuecomment-265205384) +- `maxChildrenPerNode` (positive integer, defaults to `174`): the maximum children per node for the `balanced` and `trickle` DAG builder strategies +- `layerRepeat` (positive integer, defaults to 4): (only applicable to the `trickle` DAG builder strategy). The maximum repetition of parent nodes for each layer of the tree. +- `reduceSingleLeafToSelf` (boolean, defaults to `false`): optimization for, when reducing a set of nodes with one node, reduce it to that node. +- `dirBuilder` (object): the options for the directory builder + - `hamt` (object): the options for the HAMT sharded directory builder + - bits (positive integer, defaults to `5`): the number of bits at each bucket of the HAMT -### Example Exporter +### Exporter -``` -// Create an export readable object stream with the hash you want to export and a dag service -const filesStream = Exporter(, ) +#### Exporter example + +```js +// Create an export source pull-stream cid or ipfs path you want to export and a +// to fetch the file from +const filesStream = Exporter(, ) // Pipe the return stream to console filesStream.on('data', (file) => file.content.pipe(process.stdout)) ``` -### Exporter: API +#### Exporter API ```js const Exporter = require('ipfs-unixfs-engine').Exporter ``` -### new Exporter(, ) +### new Exporter(, ) Uses the given [dag API or an ipld-resolver instance][] to fetch an IPFS [UnixFS][] object(s) by their multiaddress. diff --git a/circle.yml b/circle.yml index 434211a7..c04dc470 100644 --- a/circle.yml +++ b/circle.yml @@ -8,5 +8,6 @@ dependencies: - wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add - - sudo sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' - sudo apt-get update + - sudo apt-get install libpango-1.0-0=1.40.1-1ubuntu1 libpangocairo-1.0-0=1.40.1-1ubuntu1 libpangoft2-1.0-0=1.40.1-1ubuntu1 libpangoxft-1.0-0=1.40.1-1ubuntu1 - sudo apt-get --only-upgrade install google-chrome-stable - google-chrome --version diff --git a/package.json b/package.json index 156de975..5f0bb69a 100644 --- a/package.json +++ b/package.json @@ -40,9 +40,9 @@ "homepage": "https://github.com/ipfs/js-ipfs-unixfs-engine#readme", "devDependencies": { "aegir": "^11.0.2", - "chai": "^3.5.0", + "chai": "^4.0.1", "dirty-chai": "^1.2.2", - "ipfs": "^0.24.0", + "ipfs": "^0.24.1", "ipfs-block-service": "^0.9.1", "ipfs-repo": "^0.13.1", "ncp": "^2.0.0", @@ -54,12 +54,12 @@ }, "dependencies": { "async": "^2.4.1", + "bs58": "^4.0.1", "cids": "^0.5.0", "deep-extend": "^0.5.0", "ipfs-unixfs": "^0.1.11", "ipld-dag-pb": "^0.11.0", "ipld-resolver": "^0.11.1", - "is-ipfs": "^0.3.0", "left-pad": "^1.1.3", "lodash": "^4.17.4", "multihashes": "^0.4.5", @@ -67,6 +67,7 @@ "pull-batch": "^1.0.0", "pull-block": "^1.2.0", "pull-cat": "^1.1.11", + "pull-defer": "^0.2.2", "pull-pair": "^1.1.0", "pull-paramap": "^1.2.2", "pull-pause": "0.0.1", @@ -74,6 +75,7 @@ "pull-stream": "^3.6.0", "pull-traverse": "^1.0.3", "pull-write": "^1.1.2", + "safe-buffer": "^5.1.0", "sparse-array": "^1.3.1" }, "contributors": [ @@ -88,4 +90,4 @@ "jbenet ", "nginnever " ] -} \ No newline at end of file +} diff --git a/src/exporter/dir-flat.js b/src/exporter/dir-flat.js index b19d2dbd..8d04c1d5 100644 --- a/src/exporter/dir-flat.js +++ b/src/exporter/dir-flat.js @@ -9,28 +9,40 @@ const cat = require('pull-cat') // Logic to export a unixfs directory. module.exports = dirExporter -function dirExporter (node, name, ipldResolver, resolve, parent) { +function dirExporter (node, name, pathRest, ipldResolver, resolve, parent) { + const accepts = pathRest[0] + const dir = { path: name, hash: node.multihash } - return cat([ - pull.values([dir]), + const streams = [ pull( pull.values(node.links), pull.map((link) => ({ + linkName: link.name, path: path.join(name, link.name), hash: link.multihash })), + pull.filter((item) => accepts === undefined || item.linkName === accepts), paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => { if (err) { return cb(err) } - cb(null, resolve(n.value, item.path, ipldResolver, name, parent)) + cb(null, resolve(n.value, accepts || item.path, pathRest, ipldResolver, name, parent)) })), pull.flatten() ) - ]) + ] + + // place dir before if not specifying subtree + if (!pathRest.length) { + streams.unshift(pull.values([dir])) + } + + pathRest.shift() + + return cat(streams) } diff --git a/src/exporter/dir-hamt-sharded.js b/src/exporter/dir-hamt-sharded.js index 01171bfb..c301169a 100644 --- a/src/exporter/dir-hamt-sharded.js +++ b/src/exporter/dir-hamt-sharded.js @@ -10,7 +10,7 @@ const cleanHash = require('./clean-multihash') // Logic to export a unixfs directory. module.exports = shardedDirExporter -function shardedDirExporter (node, name, ipldResolver, resolve, parent) { +function shardedDirExporter (node, name, pathRest, ipldResolver, resolve, parent) { let dir if (!parent || parent.path !== name) { dir = [{ @@ -19,30 +19,54 @@ function shardedDirExporter (node, name, ipldResolver, resolve, parent) { }] } - return cat([ - pull.values(dir), + const streams = [ pull( pull.values(node.links), pull.map((link) => { // remove the link prefix (2 chars for the bucket index) - let p = link.name.substring(2) - // another sharded dir or file? - p = p ? path.join(name, p) : name - - return { - name: link.name, - path: p, - hash: link.multihash + const p = link.name.substring(2) + const pp = p ? path.join(name, p) : name + let accept = true + let fromPathRest = false + + if (p && pathRest.length) { + fromPathRest = true + accept = (p === pathRest[0]) + } + if (accept) { + return { + fromPathRest: fromPathRest, + name: p, + path: pp, + hash: link.multihash, + pathRest: p ? pathRest.slice(1) : pathRest + } + } else { + return '' } }), + pull.filter(Boolean), paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => { if (err) { return cb(err) } - cb(null, resolve(n.value, item.path, ipldResolver, (dir && dir[0]) || parent)) + cb( + null, + resolve( + n.value, + item.fromPathRest ? item.name : item.path, + item.pathRest, + ipldResolver, + (dir && dir[0]) || parent)) })), pull.flatten() ) - ]) + ] + + if (!pathRest.length) { + streams.unshift(pull.values(dir)) + } + + return cat(streams) } diff --git a/src/exporter/dir.js b/src/exporter/dir.js deleted file mode 100644 index 2ec2fb13..00000000 --- a/src/exporter/dir.js +++ /dev/null @@ -1,53 +0,0 @@ -'use strict' - -const path = require('path') -const pull = require('pull-stream') -const paramap = require('pull-paramap') -const CID = require('cids') -const cat = require('pull-cat') - -const fileExporter = require('./file') -const switchType = require('../util').switchType - -// Logic to export a unixfs directory. -module.exports = dirExporter - -function dirExporter (node, name, ipldResolver) { - // The algorithm below is as follows - // - // 1. Take all links from a given directory node - // 2. Map each link to their full name (parent + link name) + hash - // 3. Parallel map to - // 3.1. Resolve the hash against the dagService - // 3.2. Switch on the node type - // - `directory`: return node - // - `file`: use the fileExporter to load and return the file - // 4. Flatten - - return pull( - pull.values(node.links), - pull.map((link) => ({ - path: path.join(name, link.name), - hash: link.multihash - })), - paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, result) => { - if (err) { - return cb(err) - } - - const dir = { - path: item.path, - size: item.size - } - - const node = result.value - - cb(null, switchType( - node, - () => cat([pull.values([dir]), dirExporter(node, item.path, ipldResolver)]), - () => fileExporter(node, item.path, ipldResolver) - )) - })), - pull.flatten() - ) -} diff --git a/src/exporter/file.js b/src/exporter/file.js index 59a910a4..3329d122 100644 --- a/src/exporter/file.js +++ b/src/exporter/file.js @@ -7,7 +7,7 @@ const pull = require('pull-stream') const paramap = require('pull-paramap') // Logic to export a single (possibly chunked) unixfs file. -module.exports = (node, name, ipldResolver) => { +module.exports = (node, name, pathRest, ipldResolver) => { function getData (node) { try { const file = UnixFS.unmarshal(node.data) @@ -25,6 +25,12 @@ module.exports = (node, name, ipldResolver) => { ) } + const accepts = pathRest.shift() + + if (accepts !== undefined && accepts !== name) { + return pull.empty() + } + let content = pull( traverse.depthFirst(node, visitor), pull.map(getData) diff --git a/src/exporter/index.js b/src/exporter/index.js index f8f91e07..08017c9f 100644 --- a/src/exporter/index.js +++ b/src/exporter/index.js @@ -2,22 +2,62 @@ const pull = require('pull-stream') const CID = require('cids') -const isIPFS = require('is-ipfs') +const pullDefer = require('pull-defer') const resolve = require('./resolve').resolve -const cleanMultihash = require('./clean-multihash') -module.exports = (hash, ipldResolver) => { - if (!isIPFS.multihash(hash)) { - return pull.error(new Error('not valid multihash')) +function pathBaseAndRest (path) { + // Buffer -> raw multihash or CID in buffer + let pathBase = path + let pathRest = '/' + + if (Buffer.isBuffer(path)) { + pathBase = (new CID(path)).toBaseEncodedString() + } + + if (typeof path === 'string') { + if (path.indexOf('/ipfs/') === 0) { + path = pathBase = path.substring(6) + } + const subtreeStart = path.indexOf('/') + if (subtreeStart > 0) { + pathBase = path.substring(0, subtreeStart) + pathRest = path.substring(subtreeStart) + } + } else if (CID.isCID(pathBase)) { + pathBase = pathBase.toBaseEncodedString() + } + + pathBase = (new CID(pathBase)).toBaseEncodedString() + + return { + base: pathBase, + rest: pathRest.split('/').filter(Boolean) } +} + +module.exports = (path, dag) => { + try { + path = pathBaseAndRest(path) + } catch (err) { + return pull.error(err) + } + + const d = pullDefer.source() + + const cid = new CID(path.base) - hash = cleanMultihash(hash) + dag.get(cid, (err, node) => { + if (err) { + return pull.error(err) + } + d.resolve(pull.values([node])) + }) return pull( - ipldResolver.getStream(new CID(hash)), + d, pull.map((result) => result.value), - pull.map((node) => resolve(node, hash, ipldResolver)), + pull.map((node) => resolve(node, path.base, path.rest, dag)), pull.flatten() ) } diff --git a/src/exporter/object.js b/src/exporter/object.js new file mode 100644 index 00000000..c58ac0ed --- /dev/null +++ b/src/exporter/object.js @@ -0,0 +1,36 @@ +'use strict' + +const path = require('path') +const CID = require('cids') +const pull = require('pull-stream') +const pullDefer = require('pull-defer') + +module.exports = (node, name, pathRest, ipldResolver, resolve) => { + let newNode + if (pathRest.length) { + const pathElem = pathRest.shift() + newNode = node[pathElem] + const newName = path.join(name, pathElem) + if (CID.isCID(newNode)) { + const d = pullDefer.source() + ipldResolver.get(sanitizeCID(newNode), (err, newNode) => { + if (err) { + d.resolve(pull.error(err)) + } else { + d.resolve(resolve(newNode.value, newName, pathRest, ipldResolver, node)) + } + }) + return d + } else if (newNode !== undefined) { + return resolve(newNode, newName, pathRest, ipldResolver, node) + } else { + return pull.error('not found') + } + } else { + return pull.error(new Error('invalid node type')) + } +} + +function sanitizeCID (cid) { + return new CID(cid.version, cid.codec, cid.multihash) +} diff --git a/src/exporter/resolve.js b/src/exporter/resolve.js index 53259a9a..71b1067a 100644 --- a/src/exporter/resolve.js +++ b/src/exporter/resolve.js @@ -6,7 +6,8 @@ const pull = require('pull-stream') const resolvers = { directory: require('./dir-flat'), 'hamt-sharded-directory': require('./dir-hamt-sharded'), - file: require('./file') + file: require('./file'), + object: require('./object') } module.exports = Object.assign({ @@ -14,17 +15,19 @@ module.exports = Object.assign({ typeOf: typeOf }, resolvers) -function resolve (node, name, ipldResolver, parentNode) { +function resolve (node, hash, pathRest, ipldResolver, parentNode) { const type = typeOf(node) const resolver = resolvers[type] if (!resolver) { return pull.error(new Error('Unkown node type ' + type)) } - let stream = resolver(node, name, ipldResolver, resolve, parentNode) - return stream + return resolver(node, hash, pathRest, ipldResolver, resolve, parentNode) } function typeOf (node) { - const data = UnixFS.unmarshal(node.data) - return data.type + if (Buffer.isBuffer(node.data)) { + return UnixFS.unmarshal(node.data).type + } else { + return 'object' + } } diff --git a/test/node.js b/test/node.js index 7508b997..4a54048b 100644 --- a/test/node.js +++ b/test/node.js @@ -44,6 +44,7 @@ describe('IPFS UnixFS Engine', () => { require('./test-consumable-hash') require('./test-hamt') require('./test-exporter')(repo) + require('./test-export-subtree')(repo) require('./test-importer')(repo) require('./test-importer-flush')(repo) require('./test-import-export')(repo) diff --git a/test/test-dirbuilder-sharding.js b/test/test-dirbuilder-sharding.js index c7ea7741..d575b161 100644 --- a/test/test-dirbuilder-sharding.js +++ b/test/test-dirbuilder-sharding.js @@ -334,6 +334,30 @@ module.exports = (repo) => { } } }) + + it('exports a big dir with subpath', (done) => { + const exportHash = mh.toB58String(rootHash) + '/big/big/2000' + pull( + exporter(exportHash, ipldResolver), + pull.collect(collected) + ) + + function collected (err, nodes) { + expect(err).to.not.exist() + expect(nodes.length).to.equal(1) + expect(nodes.map((node) => node.path)).to.deep.equal([ + '2000' + ]) + pull( + nodes[0].content, + pull.collect((err, content) => { + expect(err).to.not.exist() + expect(content.toString()).to.equal('2000') + done() + }) + ) + } + }) }) }) } diff --git a/test/test-export-subtree.js b/test/test-export-subtree.js new file mode 100644 index 00000000..93067650 --- /dev/null +++ b/test/test-export-subtree.js @@ -0,0 +1,113 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +chai.use(require('dirty-chai')) +const expect = chai.expect +const BlockService = require('ipfs-block-service') +const IPLDResolver = require('ipld-resolver') +const CID = require('cids') +const loadFixture = require('aegir/fixtures') + +const pull = require('pull-stream') +const Buffer = require('safe-buffer').Buffer + +const unixFSEngine = require('./../src') +const exporter = unixFSEngine.exporter + +const smallFile = loadFixture(__dirname, 'fixtures/200Bytes.txt') + +module.exports = (repo) => { + describe('exporter', () => { + let ipldResolver + + before(() => { + const bs = new BlockService(repo) + ipldResolver = new IPLDResolver(bs) + }) + + it('export a file 2 levels down', (done) => { + const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt' + + pull( + exporter(hash, ipldResolver), + pull.collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.equal(1) + expect(files[0].path).to.equal('200Bytes.txt') + fileEql(files[0], smallFile, done) + }) + ) + }) + + it('export dir 1 level down', (done) => { + const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1' + + pull( + exporter(hash, ipldResolver), + pull.collect((err, files) => { + console.log(files) + expect(err).to.not.exist() + expect(files.length).to.equal(3) + expect(files[0].path).to.equal('level-1') + expect(files[1].path).to.equal('level-1/200Bytes.txt') + expect(files[2].path).to.equal('level-1/level-2') + fileEql(files[1], smallFile, done) + }) + ) + }) + + it('export a non existing file', (done) => { + const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/doesnotexist' + + pull( + exporter(hash, ipldResolver), + pull.collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.equal(0) + done() + }) + ) + }) + + it('exports starting from non-protobuf node', (done) => { + const doc = { a: { file: new CID('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN') } } + ipldResolver.put(doc, { format: 'dag-cbor' }, (err, cid) => { + expect(err).to.not.exist() + const nodeCID = cid.toBaseEncodedString() + + pull( + exporter(nodeCID + '/a/file/level-1/200Bytes.txt', ipldResolver), + pull.collect((err, files) => { + expect(err).to.not.exist() + expect(files.length).to.equal(1) + expect(files[0].path).to.equal('200Bytes.txt') + fileEql(files[0], smallFile, done) + }) + ) + }) + }) + }) +} + +function fileEql (f1, f2, done) { + pull( + f1.content, + pull.collect((err, data) => { + if (err) { + return done(err) + } + + try { + if (f2) { + expect(Buffer.concat(data)).to.eql(f2) + } else { + expect(data).to.exist() + } + } catch (err) { + return done(err) + } + done() + }) + ) +} diff --git a/test/test-exporter.js b/test/test-exporter.js index fa4de22c..036e48df 100644 --- a/test/test-exporter.js +++ b/test/test-exporter.js @@ -12,6 +12,7 @@ const pull = require('pull-stream') const zip = require('pull-zip') const CID = require('cids') const loadFixture = require('aegir/fixtures') +const Buffer = require('safe-buffer').Buffer const unixFSEngine = require('./../src') const exporter = unixFSEngine.exporter @@ -29,7 +30,7 @@ module.exports = (repo) => { it('ensure hash inputs are sanitized', (done) => { const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8' - const mhBuf = new Buffer(bs58.decode(hash)) + const mhBuf = Buffer.from(bs58.decode(hash)) const cid = new CID(hash) ipldResolver.get(cid, (err, result) => { @@ -85,6 +86,19 @@ module.exports = (repo) => { ) }) + it('export a small file with links using CID instead of multihash', (done) => { + const cid = new CID('QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q') + + pull( + exporter(cid, ipldResolver), + pull.collect((err, files) => { + expect(err).to.not.exist() + + fileEql(files[0], bigFile, done) + }) + ) + }) + it('export a large file > 5mb', (done) => { const hash = 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE' pull( @@ -150,7 +164,10 @@ module.exports = (repo) => { ) }) - it('fails on non existent hash', (done) => { + // TODO: This needs for the stores to have timeouts, + // otherwise it is impossible to predict if a file doesn't + // really exist + it.skip('fails on non existent hash', (done) => { // This hash doesn't exist in the repo const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKj3'