// node-cacache/test/get.js

'use strict'
const fs = require('fs/promises')
const index = require('../lib/entry-index')
const memo = require('../lib/memoization')
const path = require('path')
const t = require('tap')
const ssri = require('ssri')
const CacheContent = require('./fixtures/cache-content')
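// Shared fixtures: tests store CONTENT under KEY and check that it
// round-trips with the matching INTEGRITY, SIZE, and METADATA.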
const CONTENT = Buffer.from('foobarbaz', 'utf8')
const SIZE = CONTENT.length
const KEY = 'my-test-key'
const INTEGRITY = ssri.fromData(CONTENT).toString()
const METADATA = { foo: 'bar' }
const { get } = require('..')
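
// opts() builds the options object handed to index.insert(); tests can layer
// extra fields on top of these defaults.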
function opts (extra) {
return Object.assign(
{
size: SIZE,
metadata: METADATA,
},
extra
)
}
// Simple wrapper util because this gets WORDY: run a stream get and collect
// the data along with the emitted integrity, metadata, and size.
function streamGet (byDigest, ...args) {
  let integrity
  let metadata
  let size
  const stream = (byDigest ? get.stream.byDigest : get.stream)(...args)
return stream
.on('integrity', (int) => {
integrity = ssri.stringify(int)
})
.on('metadata', (m) => {
metadata = m
})
.on('size', (s) => {
size = s
})
.concat()
.then((data) => ({
data,
integrity,
metadata,
size,
}))
}
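// e.g. streamGet(false, CACHE, KEY) resolves to { data, integrity, metadata, size },
// while streamGet(true, CACHE, INTEGRITY) reads by digest via get.stream.byDigest.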
t.test('get.info index entry lookup', async t => {
const CACHE = t.testdir()
const indexInsert = await index.insert(CACHE, KEY, INTEGRITY, opts())
const entry = await get.info(CACHE, KEY)
t.same(entry, indexInsert, 'get.info() returned the right entry')
})
t.test('get will throw ENOENT if not found', (t) => {
const CACHE = t.testdir()
return get(CACHE, KEY)
.then(() => {
throw new Error('lookup should fail')
})
.catch((err) => {
t.ok(err, 'got an error')
t.equal(err.code, 'ENOENT', 'error code is ENOENT')
return get.info(CACHE, KEY)
})
.catch((err) => {
t.ok(err, 'got an error')
t.equal(err.code, 'ENOENT', 'error code is ENOENT')
})
})
t.test('basic bulk get', async t => {
const CACHE = t.testdir(
CacheContent({
[INTEGRITY]: CONTENT,
})
)
await index.insert(CACHE, KEY, INTEGRITY, opts())
await t.resolveMatch(
get(CACHE, KEY),
{
metadata: METADATA,
data: CONTENT,
integrity: INTEGRITY,
size: SIZE,
},
'bulk key get returned proper data'
)
await t.resolveMatch(
get.byDigest(CACHE, INTEGRITY),
CONTENT,
'byDigest returned proper data'
)
})
t.test('get.byDigest without memoization', async t => {
const CACHE = t.testdir(
CacheContent({
[INTEGRITY]: CONTENT,
})
)
await index.insert(CACHE, KEY, INTEGRITY, opts())
const res = await get(CACHE, KEY)
t.same(
res,
{
metadata: METADATA,
data: CONTENT,
integrity: INTEGRITY,
size: SIZE,
},
'bulk key get returned proper data')
memo.clearMemoized()
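  // with memoize unset (the default), reads should leave the memo cache empty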
t.same(memo.get.byDigest(CACHE, INTEGRITY), undefined)
const resByDig = await get.byDigest(CACHE, INTEGRITY)
t.same(resByDig, CONTENT, 'byDigest returned proper data')
t.same(memo.get.byDigest(CACHE, INTEGRITY), undefined)
const resByDigMemo = await get.byDigest(CACHE, INTEGRITY)
t.same(resByDigMemo, CONTENT, 'byDigest returned proper data')
})
t.test('get.byDigest with memoization', async t => {
const CACHE = t.testdir(
CacheContent({
[INTEGRITY]: CONTENT,
})
)
await index.insert(CACHE, KEY, INTEGRITY, opts())
const res = await get(CACHE, KEY)
t.same(
res,
{
metadata: METADATA,
data: CONTENT,
integrity: INTEGRITY,
size: SIZE,
},
'bulk key get returned proper data')
memo.clearMemoized()
t.same(memo.get.byDigest(CACHE, INTEGRITY), undefined)
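
  // with { memoize: true }, the read should populate the memoization cache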
const resByDig = await get.byDigest(CACHE, INTEGRITY, { memoize: true })
t.same(resByDig, CONTENT, 'byDigest returned proper data')
t.notSame(memo.get.byDigest(CACHE, INTEGRITY), undefined)
const resByDigMemo = await get.byDigest(CACHE, INTEGRITY, { memoize: true })
t.same(resByDigMemo, CONTENT, 'byDigest returned proper data')
})
t.test('get without memoization', async t => {
const CACHE = t.testdir(
CacheContent({
[INTEGRITY]: CONTENT,
})
)
await index.insert(CACHE, KEY, INTEGRITY, opts())
const res = await get(CACHE, KEY)
t.same(
res,
{
metadata: METADATA,
data: CONTENT,
integrity: INTEGRITY,
size: SIZE,
},
'bulk key get returned proper data')
memo.clearMemoized()
t.same(memo.get(CACHE, KEY), undefined)
const resByDig = await get(CACHE, KEY)
t.same(resByDig, {
metadata: METADATA,
data: CONTENT,
integrity: INTEGRITY,
size: SIZE,
}, 'get returned proper data')
t.same(memo.get(CACHE, KEY), undefined)
const resByDigMemo = await get(CACHE, KEY)
t.same(resByDigMemo, {
metadata: METADATA,
data: CONTENT,
integrity: INTEGRITY,
size: SIZE,
}, 'get returned proper data')
})
t.test('get with memoization', async t => {
const CACHE = t.testdir(
CacheContent({
[INTEGRITY]: CONTENT,
})
)
await index.insert(CACHE, KEY, INTEGRITY, opts())
const res = await get(CACHE, KEY)
t.same(
res,
{
metadata: METADATA,
data: CONTENT,
integrity: INTEGRITY,
size: SIZE,
},
'bulk key get returned proper data')
memo.clearMemoized()
t.same(memo.get(CACHE, KEY), undefined)
const resByDig = await get(CACHE, KEY, { memoize: true })
t.same(resByDig, {
metadata: METADATA,
data: CONTENT,
integrity: INTEGRITY,
size: SIZE,
}, 'get returned proper data')
t.notSame(memo.get(CACHE, KEY), undefined)
const resByDigMemo = await get(CACHE, KEY, { memoize: true })
t.same(resByDigMemo, {
metadata: METADATA,
data: CONTENT,
integrity: INTEGRITY,
size: SIZE,
}, 'get returned proper data')
})
t.test('basic stream get', async t => {
const CACHE = t.testdir(
CacheContent({
[INTEGRITY]: CONTENT,
})
)
await index.insert(CACHE, KEY, INTEGRITY, opts())
const [byKey, byDigest] = await Promise.all([
streamGet(false, CACHE, KEY),
streamGet(true, CACHE, INTEGRITY),
])
t.same(
byKey,
{
data: CONTENT,
integrity: INTEGRITY,
metadata: METADATA,
size: SIZE,
},
'got all expected data and fields from key fetch'
)
t.same(byDigest.data, CONTENT, 'got correct data from digest fetch')
})
t.test('get.stream add new listeners post stream creation', async (t) => {
const CACHE = t.testdir(
CacheContent({
[INTEGRITY]: CONTENT,
})
)
return index.insert(CACHE, KEY, INTEGRITY, opts()).then(() => {
const OPTS = { memoize: false, size: CONTENT.length }
const stream = get.stream(CACHE, KEY, OPTS)
return Promise.all([
new Promise((resolve) => stream.on('integrity', resolve)),
new Promise((resolve) => stream.on('metadata', resolve)),
new Promise((resolve) => stream.on('size', resolve)),
]).then(() => {
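      // all three events have already fired; adding listeners now must still be safe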
[
'integrity',
'metadata',
'size',
].forEach(ev => {
        stream.on(ev, () => {
          t.ok(true, `${ev} listener added`)
        })
})
return stream.concat()
})
})
})
t.test('get.copy will throw ENOENT if not found', (t) => {
const CACHE = t.testdir()
const DEST = path.join(CACHE, 'not-found')
return get.copy(CACHE, 'NOT-FOUND', DEST)
.then(() => {
throw new Error('lookup should fail')
})
.catch((err) => {
t.ok(err, 'got an error')
t.equal(err.code, 'ENOENT', 'error code is ENOENT')
})
})
t.test('get.copy with fs.copyFile', (t) => {
const CACHE = t.testdir(
CacheContent({
[INTEGRITY]: CONTENT,
})
)
const DEST = path.join(CACHE, 'copymehere')
return index
.insert(CACHE, KEY, INTEGRITY, opts())
.then(() => get.copy(CACHE, KEY, DEST))
.then((res) => {
t.same(
res,
{
metadata: METADATA,
integrity: INTEGRITY,
size: SIZE,
},
'copy operation returns basic metadata'
)
return fs.readFile(DEST)
})
.then((data) => {
t.same(data, CONTENT, 'data copied by key matches')
return fs.rm(DEST, { recursive: true, force: true })
})
.then(() => get.copy.byDigest(CACHE, INTEGRITY, DEST))
.then(() => fs.readFile(DEST))
.then((data) => {
t.same(data, CONTENT, 'data copied by digest matches')
return fs.rm(DEST, { recursive: true, force: true })
})
})
t.test('memoizes data on bulk read', (t) => {
memo.clearMemoized()
const CACHE = t.testdir(
CacheContent({
[INTEGRITY]: CONTENT,
})
)
return index.insert(CACHE, KEY, INTEGRITY, opts()).then((ENTRY) => {
return get(CACHE, KEY)
.then(() => {
t.same(memo.get(CACHE, KEY), null, 'no memoization!')
return get(CACHE, KEY, { memoize: true })
})
.then((res) => {
t.same(
res,
{
metadata: METADATA,
data: CONTENT,
integrity: INTEGRITY,
size: SIZE,
},
'usual data returned'
)
t.same(
memo.get(CACHE, KEY),
{
entry: ENTRY,
data: CONTENT,
},
'data inserted into memoization cache'
)
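
        // wipe the cache from disk; the next get must be served from memory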
return fs.rm(CACHE, { recursive: true, force: true })
})
.then(() => {
return get(CACHE, KEY)
})
.then((res) => {
t.same(
res,
{
metadata: METADATA,
data: CONTENT,
integrity: INTEGRITY,
size: SIZE,
},
'memoized data fetched by default'
)
return get(CACHE, KEY, { memoize: false })
.then(() => {
throw new Error('expected get to fail')
})
.catch((err) => {
t.ok(err, 'got an error from unmemoized get')
t.equal(err.code, 'ENOENT', 'cached content not found')
t.same(
memo.get(CACHE, KEY),
{
entry: ENTRY,
data: CONTENT,
},
'data still in memoization cache'
)
})
})
})
})
t.test('memoizes data on stream read', async t => {
memo.clearMemoized()
const CACHE = t.testdir(
CacheContent({
[INTEGRITY]: CONTENT,
})
)
const ENTRY = await index.insert(CACHE, KEY, INTEGRITY, opts())
await Promise.all([
streamGet(false, CACHE, KEY),
streamGet(true, CACHE, INTEGRITY),
])
t.same(memo.get(CACHE, KEY), null, 'no memoization by key!')
t.same(
memo.get.byDigest(CACHE, INTEGRITY),
null,
'no memoization by digest!'
)
memo.clearMemoized()
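  // a digest-only stream read memoizes the content but records no key entry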
const byDigest = await streamGet(true, CACHE, INTEGRITY, {
memoize: true,
})
t.same(byDigest.data, CONTENT, 'usual data returned from stream')
t.same(memo.get(CACHE, KEY), null, 'digest fetch = no key entry')
t.same(
memo.get.byDigest(CACHE, INTEGRITY),
CONTENT,
'content memoized'
)
t.same(
memo.get.byDigest('whatev', INTEGRITY),
null,
'content memoization filtered by cache'
)
memo.clearMemoized()
await t.resolveMatch(
streamGet(false, CACHE, KEY, { memoize: true }),
{
metadata: METADATA,
data: CONTENT,
integrity: INTEGRITY,
size: SIZE,
},
'usual data returned from key fetch'
)
t.same(
memo.get(CACHE, KEY),
{
entry: ENTRY,
data: CONTENT,
},
'data inserted into memoization cache'
)
t.same(
memo.get.byDigest(CACHE, INTEGRITY),
CONTENT,
'content memoized by digest, too'
)
t.same(
memo.get('whatev', KEY),
null,
'entry memoization filtered by cache'
)
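
  // remove the cache from disk; the following stream reads must hit the memo cache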
await fs.rm(CACHE, { recursive: true, force: true })
await t.resolveMatch(
streamGet(false, CACHE, KEY),
{
metadata: METADATA,
data: CONTENT,
integrity: INTEGRITY,
size: SIZE,
},
'key fetch fulfilled by memoization cache'
)
await t.resolveMatch(
streamGet(true, CACHE, INTEGRITY),
{ data: CONTENT },
'digest fetch fulfilled by memoization cache'
)
await t.rejects(
streamGet(false, CACHE, KEY, { memoize: false }),
{ code: 'ENOENT' },
'key get memoization bypassed'
)
await t.rejects(
streamGet(true, CACHE, INTEGRITY, { memoize: false }),
{ code: 'ENOENT' },
'digest get memoization bypassed'
)
})
t.test('get.info uses memoized data', async t => {
memo.clearMemoized()
const CACHE = t.testdir()
const ENTRY = {
key: KEY,
integrity: INTEGRITY,
    time: Date.now(),
size: SIZE,
metadata: null,
}
memo.put(CACHE, ENTRY, CONTENT)
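
  // nothing exists on disk, so get.info can only find the entry in memory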
const info = await get.info(CACHE, KEY)
t.same(info, ENTRY, 'got the entry from memoization cache')
})
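
// Pending tests: tap reports a t.test() call with no body as a TODO.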
t.test('identical hashes with different algorithms do not conflict')
t.test('throw error if something is really wrong with bucket')