-
Notifications
You must be signed in to change notification settings - Fork 28.2k
/
Copy pathdefault.ts
162 lines (130 loc) · 4.29 KB
/
default.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
/**
* This is the default "use cache" handler it defaults to an in-memory store.
* In-memory caches are fragile and should not use stale-while-revalidate
* semantics on the caches because it's not worth warming up an entry that's
* likely going to get evicted before we get to use it anyway. However, we also
* don't want to reuse a stale entry for too long so stale entries should be
* considered expired/missing in such cache handlers.
*/
import { LRUCache } from '../lru-cache'
import type { CacheEntry, CacheHandlerV2 } from './types'
import {
isStale,
tagsManifest,
} from '../incremental-cache/tags-manifest.external'
/**
 * Wrapper stored in the in-memory LRU around the public `CacheEntry`,
 * carrying bookkeeping the default handler needs for eviction and
 * error-retry behavior.
 */
type PrivateCacheEntry = {
  entry: CacheEntry
  // For the default cache we store errored cache entries and allow them to
  // be used up to 3 times. After that we want to dispose the entry and try
  // for a fresh one. While an entry is errored we return "no entry" three
  // times so callers retry against origin (MISS); if setting still fails
  // after the third retry we return the errored content with an expiration
  // of Math.min(30, entry.expiration).
  // NOTE(review): the retry path described here is not implemented in this
  // file yet (see the TODO in set()); these fields are currently only
  // initialized to false/0 — confirm against the intended design.
  isErrored: boolean
  errorRetryCount: number
  // Byte size of the entry's stream, computed once in set() by draining a
  // teed clone — the LRU needs a synchronous size to do byte-based eviction.
  size: number
}
// In-memory LRU bounded at 50 MB total, measured by each entry's computed
// stream byte size (see PrivateCacheEntry.size).
const memoryCache = new LRUCache<PrivateCacheEntry>(
  50 * 1024 * 1024,
  (entry) => entry.size
)
// Tracks in-flight set() operations per cache key so get() can wait for a
// pending write instead of reporting a spurious miss.
const pendingSets = new Map<string, Promise<void>>()
// Optional debug logger, enabled via the NEXT_PRIVATE_DEBUG_CACHE env var;
// undefined otherwise so call sites can use `debug?.(...)`.
const debug = process.env.NEXT_PRIVATE_DEBUG_CACHE
  ? console.debug.bind(console, 'DefaultCacheHandler:')
  : undefined
/**
 * Default in-memory "use cache" handler.
 *
 * Entries live in a byte-bounded LRU. Because in-memory entries are likely
 * to be evicted before a revalidated copy could be reused, stale entries are
 * treated as expired/missing rather than served stale-while-revalidate.
 */
const DefaultCacheHandler: CacheHandlerV2 = {
  async get(cacheKey) {
    // If a set() for this key is in flight, wait for it to settle so we
    // don't report a miss for an entry that's about to land in the cache.
    const pendingPromise = pendingSets.get(cacheKey)
    if (pendingPromise) {
      debug?.('get', cacheKey, 'pending')
      await pendingPromise
    }

    const privateEntry = memoryCache.get(cacheKey)
    if (!privateEntry) {
      debug?.('get', cacheKey, 'not found')
      return undefined
    }

    const entry = privateEntry.entry
    if (
      performance.timeOrigin + performance.now() >
      entry.timestamp + entry.revalidate * 1000
    ) {
      // In-memory caches should expire after revalidate time because it is
      // unlikely that a new entry will be able to be used before it is dropped
      // from the cache.
      debug?.('get', cacheKey, 'expired')
      return undefined
    }

    if (isStale(entry.tags, entry.timestamp)) {
      debug?.('get', cacheKey, 'had stale tag')
      return undefined
    }

    // Tee the stored stream: hand one branch to the caller and keep the
    // other so the entry remains readable for subsequent gets.
    const [returnStream, newSaved] = entry.value.tee()
    entry.value = newSaved

    debug?.('get', cacheKey, 'found', {
      tags: entry.tags,
      timestamp: entry.timestamp,
      revalidate: entry.revalidate,
      expire: entry.expire,
    })

    return {
      ...entry,
      value: returnStream,
    }
  },
  async set(cacheKey, pendingEntry) {
    debug?.('set', cacheKey, 'start')

    // Publish a pending marker before any await so concurrent get() calls
    // block on this write instead of missing.
    let resolvePending: () => void = () => {}
    const pendingPromise = new Promise<void>((resolve) => {
      resolvePending = resolve
    })
    pendingSets.set(cacheKey, pendingPromise)

    let size = 0
    try {
      // Await inside the try so that a rejected entry promise still reaches
      // the finally block. Previously this await preceded the try, so a
      // rejection left a never-resolving promise in pendingSets and every
      // subsequent get(cacheKey) deadlocked awaiting it.
      const entry = await pendingEntry

      const [value, clonedValue] = entry.value.tee()
      entry.value = value

      // Drain the teed clone to measure the entry's byte size up front —
      // the LRU needs a synchronous size for byte-based eviction.
      const reader = clonedValue.getReader()
      for (let chunk; !(chunk = await reader.read()).done; ) {
        size += Buffer.from(chunk.value).byteLength
      }

      memoryCache.set(cacheKey, {
        entry,
        isErrored: false,
        errorRetryCount: 0,
        size,
      })
      debug?.('set', cacheKey, 'done')
    } catch (err) {
      // TODO: store partial buffer with error after we retry 3 times
      debug?.('set', cacheKey, 'failed', err)
    } finally {
      // Always release waiters and clear the pending marker, even on error.
      resolvePending()
      pendingSets.delete(cacheKey)
    }
  },
  async refreshTags() {
    // Nothing to do for an in-memory cache handler.
  },
  async getExpiration(...tags) {
    // Guard the empty case: Math.max() with no arguments is -Infinity.
    // Returning 0 matches the value an unknown tag contributes and means
    // "no expiration recorded".
    const expiration =
      tags.length === 0
        ? 0
        : Math.max(...tags.map((tag) => tagsManifest.get(tag) ?? 0))

    debug?.('getExpiration', { tags, expiration })
    return expiration
  },
  async expireTags(...tags) {
    // Record the expiry moment as epoch milliseconds; get() treats any entry
    // whose timestamp predates a tag's expiry as stale (via isStale).
    const timestamp = Math.round(performance.timeOrigin + performance.now())
    debug?.('expireTags', { tags, timestamp })

    for (const tag of tags) {
      // TODO: update file-system-cache?
      tagsManifest.set(tag, timestamp)
    }
  },
}
export default DefaultCacheHandler