diff --git a/SPEC/REFS.md b/SPEC/REFS.md
index e86fea704..3dd26a398 100644
--- a/SPEC/REFS.md
+++ b/SPEC/REFS.md
@@ -102,9 +102,12 @@ pull(
 
 #### `refs.local`
 
-> Output all local references (CIDs of all blocks in the blockstore)
+> Output all local references (CIDs of all blocks in the blockstore. CIDs are reconstructed, hence they might not match the CIDs under which the blocks were originally stored)
 
-##### `ipfs.refs.local([callback])`
+##### `ipfs.refs.local([options], [callback])`
+
+`options` is an optional object that may contain the following keys:
+  - `multihash (false)`: instead of reconstructed CIDs, the original multihashes are returned as base32-encoded strings
 
 `callback` must follow `function (err, refs) {}` signature, where `err` is an error if the operation was not successful and `refs` is an array of `{ ref: "myref", err: "error msg" }`
 
@@ -129,10 +132,13 @@ ipfs.refs.local(function (err, refs) {
 })
 ```
 
-#### `refs.localReadableStream`
+#### `refs.localReadableStream([options])`
 
 > Output all local references using a [Readable Stream][rs]
 
+`options` is an optional object that may contain the following keys:
+  - `multihash (false)`: instead of reconstructed CIDs, the original multihashes are returned as base32-encoded strings
+
 ##### `ipfs.localReadableStream()` -> [Readable Stream][rs]
 
 **Example:**
@@ -148,10 +154,13 @@ stream.on('data', function (ref) {
 })
 ```
 
-#### `refs.localPullStream`
+#### `refs.localPullStream([options])`
 
 > Output all local references using a [Pull Stream][ps].
 
+`options` is an optional object that may contain the following keys:
+  - `multihash (false)`: instead of reconstructed CIDs, the original multihashes are returned as base32-encoded strings
+
 ##### `ipfs.refs.localReadableStream()` -> [Pull Stream][ps]
 
 **Example:**
diff --git a/SPEC/REPO.md b/SPEC/REPO.md
index c0abc44f0..2c0a81e9a 100644
--- a/SPEC/REPO.md
+++ b/SPEC/REPO.md
@@ -20,7 +20,7 @@ Where:
 - `err` is an Error if the whole GC operation was not successful.
 - `res` is an array of objects that contains the following properties
   - `err` is an Error if it was not possible to GC a particular block.
-  - `cid` is the [CID][cid] of the block that was Garbage Collected.
+  - `multihash` is the [multihash][multihashes] of the block that was Garbage Collected.
 
 If no `callback` is passed, a promise is returned.
 
@@ -84,4 +84,4 @@ ipfs.repo.version((err, version) => console.log(version))
 ```
 
 [1]: https://github.com/MikeMcl/bignumber.js/
-[cid]: https://www.npmjs.com/package/cids
+[multihashes]: https://www.npmjs.com/package/multihashes
diff --git a/src/files-regular/refs-local-tests.js b/src/files-regular/refs-local-tests.js
index af6f7fcb8..4e78c167a 100644
--- a/src/files-regular/refs-local-tests.js
+++ b/src/files-regular/refs-local-tests.js
@@ -49,8 +49,8 @@ module.exports = (createCommon, suiteName, ipfsRefsLocal, options) => {
           expect(err).to.not.exist()
 
           const cids = refs.map(r => r.ref)
-          expect(cids).to.include('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn')
-          expect(cids).to.include('QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr')
+          expect(cids).to.include('bafkreicuinkdxczmxol5edpb2jumkbkvtoehj6qixz6yvvxgstp3cr5hey')
+          expect(cids).to.include('bafkreigm5vpfwjayhkmp7d3gc6hwj4c536ns6ajxi3cyi3uulta45rpyzy')
 
           done()
         })
diff --git a/src/repo/gc.js b/src/repo/gc.js
index e2d0158b0..04fb03a2e 100644
--- a/src/repo/gc.js
+++ b/src/repo/gc.js
@@ -3,6 +3,12 @@
 
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 const { DAGNode } = require('ipld-dag-pb')
+const CID = require('cids')
+
+function cidV0ToV1Raw (hash) {
+  const multihash = new CID(hash).multihash
+  return new CID(1, 'raw', multihash).toString()
+}
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -52,12 +58,13 @@ module.exports = (createCommon, options) => {
       // information that refers to the blocks
       const addRes = await ipfs.add(Buffer.from('apples'))
       const hash = addRes[0].hash
+      const cidV1 = cidV0ToV1Raw(hash)
 
       // Get the list of local blocks after the add, should be bigger than
       // the initial list and contain hash
       const refsAfterAdd = await ipfs.refs.local()
       expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
-      expect(refsAfterAdd.map(r => r.ref)).includes(hash)
+      expect(refsAfterAdd.map(r => r.ref)).includes(cidV1)
 
       // Run garbage collection
       await ipfs.repo.gc()
@@ -65,7 +72,7 @@ module.exports = (createCommon, options) => {
       // Get the list of local blocks after GC, should still contain the hash,
       // because the file is still pinned
       const refsAfterGc = await ipfs.refs.local()
-      expect(refsAfterGc.map(r => r.ref)).includes(hash)
+      expect(refsAfterGc.map(r => r.ref)).includes(cidV1)
 
       // Unpin the data
       await ipfs.pin.rm(hash)
@@ -75,7 +82,7 @@ module.exports = (createCommon, options) => {
 
       // The list of local blocks should no longer contain the hash
       const refsAfterUnpinAndGc = await ipfs.refs.local()
-      expect(refsAfterUnpinAndGc.map(r => r.ref)).not.includes(hash)
+      expect(refsAfterUnpinAndGc.map(r => r.ref)).not.includes(cidV1)
     })
 
     it('should clean up removed MFS files', async () => {
@@ -86,13 +93,13 @@ module.exports = (createCommon, options) => {
       await ipfs.files.write('/test', Buffer.from('oranges'), { create: true })
       const stats = await ipfs.files.stat('/test')
       expect(stats.type).to.equal('file')
-      const hash = stats.hash
+      const cidV1 = cidV0ToV1Raw(stats.hash)
 
       // Get the list of local blocks after the add, should be bigger than
       // the initial list and contain hash
       const refsAfterAdd = await ipfs.refs.local()
       expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
-      expect(refsAfterAdd.map(r => r.ref)).includes(hash)
+      expect(refsAfterAdd.map(r => r.ref)).includes(cidV1)
 
       // Run garbage collection
       await ipfs.repo.gc()
@@ -100,7 +107,7 @@ module.exports = (createCommon, options) => {
       // Get the list of local blocks after GC, should still contain the hash,
       // because the file is in MFS
       const refsAfterGc = await ipfs.refs.local()
-      expect(refsAfterGc.map(r => r.ref)).includes(hash)
+      expect(refsAfterGc.map(r => r.ref)).includes(cidV1)
 
       // Remove the file
       await ipfs.files.rm('/test')
@@ -110,7 +117,7 @@ module.exports = (createCommon, options) => {
 
       // The list of local blocks should no longer contain the hash
       const refsAfterUnpinAndGc = await ipfs.refs.local()
-      expect(refsAfterUnpinAndGc.map(r => r.ref)).not.includes(hash)
+      expect(refsAfterUnpinAndGc.map(r => r.ref)).not.includes(cidV1)
     })
 
     it('should clean up block only after unpinned and removed from MFS', async () => {
@@ -121,21 +128,22 @@ module.exports = (createCommon, options) => {
       await ipfs.files.write('/test', Buffer.from('peaches'), { create: true })
       const stats = await ipfs.files.stat('/test')
       expect(stats.type).to.equal('file')
-      const mfsFileHash = stats.hash
+      const mfsFileCidV1 = cidV0ToV1Raw(stats.hash)
 
       // Get the CID of the data in the file
-      const block = await ipfs.block.get(mfsFileHash)
+      const block = await ipfs.block.get(mfsFileCidV1)
 
       // Add the data to IPFS (which implicitly pins the data)
       const addRes = await ipfs.add(block.data)
       const dataHash = addRes[0].hash
+      const dataCidV1 = cidV0ToV1Raw(dataHash)
 
       // Get the list of local blocks after the add, should be bigger than
       // the initial list and contain the data hash
       const refsAfterAdd = await ipfs.refs.local()
       expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
       const hashesAfterAdd = refsAfterAdd.map(r => r.ref)
-      expect(hashesAfterAdd).includes(dataHash)
+      expect(hashesAfterAdd).includes(dataCidV1)
 
       // Run garbage collection
       await ipfs.repo.gc()
@@ -144,7 +152,7 @@ module.exports = (createCommon, options) => {
       // because the file is pinned and in MFS
       const refsAfterGc = await ipfs.refs.local()
       const hashesAfterGc = refsAfterGc.map(r => r.ref)
-      expect(hashesAfterGc).includes(dataHash)
+      expect(hashesAfterGc).includes(dataCidV1)
 
       // Remove the file
       await ipfs.files.rm('/test')
@@ -156,8 +164,8 @@ module.exports = (createCommon, options) => {
       // because the file is still pinned
       const refsAfterRmAndGc = await ipfs.refs.local()
       const hashesAfterRmAndGc = refsAfterRmAndGc.map(r => r.ref)
-      expect(hashesAfterRmAndGc).not.includes(mfsFileHash)
-      expect(hashesAfterRmAndGc).includes(dataHash)
+      expect(hashesAfterRmAndGc).not.includes(mfsFileCidV1)
+      expect(hashesAfterRmAndGc).includes(dataCidV1)
 
       // Unpin the data
       await ipfs.pin.rm(dataHash)
@@ -168,8 +176,8 @@ module.exports = (createCommon, options) => {
       // The list of local blocks should no longer contain the hashes
       const refsAfterUnpinAndGc = await ipfs.refs.local()
       const hashesAfterUnpinAndGc = refsAfterUnpinAndGc.map(r => r.ref)
-      expect(hashesAfterUnpinAndGc).not.includes(mfsFileHash)
-      expect(hashesAfterUnpinAndGc).not.includes(dataHash)
+      expect(hashesAfterUnpinAndGc).not.includes(mfsFileCidV1)
+      expect(hashesAfterUnpinAndGc).not.includes(dataCidV1)
     })
 
     it('should clean up indirectly pinned data after recursive pin removal', async () => {
@@ -179,6 +187,7 @@ module.exports = (createCommon, options) => {
       // Add some data
       const addRes = await ipfs.add(Buffer.from('pears'))
       const dataHash = addRes[0].hash
+      const dataHashCidV1 = cidV0ToV1Raw(dataHash)
 
       // Unpin the data
       await ipfs.pin.rm(dataHash)
@@ -192,6 +201,7 @@ module.exports = (createCommon, options) => {
 
       // Put the object into IPFS
       const objHash = (await ipfs.object.put(obj)).toString()
+      const objCidV1 = cidV0ToV1Raw(objHash)
 
       // Putting an object doesn't pin it
       expect((await ipfs.pin.ls()).map(p => p.hash)).not.includes(objHash)
@@ -201,8 +211,8 @@ module.exports = (createCommon, options) => {
       const refsAfterAdd = await ipfs.refs.local()
       expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
       const hashesAfterAdd = refsAfterAdd.map(r => r.ref)
-      expect(hashesAfterAdd).includes(objHash)
-      expect(hashesAfterAdd).includes(dataHash)
+      expect(hashesAfterAdd).includes(objCidV1)
+      expect(hashesAfterAdd).includes(dataHashCidV1)
 
       // Recursively pin the object
       await ipfs.pin.add(objHash, { recursive: true })
@@ -217,7 +227,7 @@ module.exports = (createCommon, options) => {
       // Get the list of local blocks after GC, should still contain the data
       // hash, because the data is still (indirectly) pinned
       const refsAfterGc = await ipfs.refs.local()
-      expect(refsAfterGc.map(r => r.ref)).includes(dataHash)
+      expect(refsAfterGc.map(r => r.ref)).includes(dataHashCidV1)
 
       // Recursively unpin the object
       await ipfs.pin.rm(objHash)
@@ -228,8 +238,8 @@ module.exports = (createCommon, options) => {
       // The list of local blocks should no longer contain the hashes
       const refsAfterUnpinAndGc = await ipfs.refs.local()
       const hashesAfterUnpinAndGc = refsAfterUnpinAndGc.map(r => r.ref)
-      expect(hashesAfterUnpinAndGc).not.includes(objHash)
-      expect(hashesAfterUnpinAndGc).not.includes(dataHash)
+      expect(hashesAfterUnpinAndGc).not.includes(objCidV1)
+      expect(hashesAfterUnpinAndGc).not.includes(dataHashCidV1)
     })
   })
 }