1
1
import assert from 'node:assert'
2
- import { LRUCache } from 'lru-cache'
3
2
import { DecoratorHandler , parseHeaders , parseCacheControl } from '../utils.js'
3
+ import { DatabaseSync } from 'node:sqlite' // --experimental-sqlite
4
+ import * as BJSON from 'buffer-json'
4
5
5
6
class CacheHandler extends DecoratorHandler {
6
7
#handler
7
8
#store
8
9
#key
10
+ #opts
9
11
#value
10
12
11
- constructor ( { key, handler, store } ) {
13
+ constructor ( { key, handler, store, opts } ) {
12
14
super ( handler )
13
15
14
16
this . #key = key
15
17
this . #handler = handler
16
18
this . #store = store
19
+ this . #opts = opts
17
20
}
18
21
19
22
onConnect ( abort ) {
20
- console . log ( 'onConnect abort' )
21
- console . log ( abort )
22
-
23
23
this . #value = null
24
24
25
25
return this . #handler. onConnect ( abort )
26
26
}
27
27
28
28
onHeaders ( statusCode , rawHeaders , resume , statusMessage , headers = parseHeaders ( rawHeaders ) ) {
29
- console . log ( 'onHeaders' )
30
- console . log ( { statusCode, rawHeaders, resume, statusMessage, headers } )
31
-
32
- if ( statusCode !== 307 ) {
29
+ if ( statusCode !== 307 || statusCode !== 200 ) {
33
30
return this . #handler. onHeaders ( statusCode , rawHeaders , resume , statusMessage , headers )
34
31
}
35
32
36
- // TODO (fix): Support vary header.
37
33
const cacheControl = parseCacheControl ( headers [ 'cache-control' ] )
38
-
39
34
const contentLength = headers [ 'content-length' ] ? Number ( headers [ 'content-length' ] ) : Infinity
40
35
const maxEntrySize = this . #store. maxEntrySize ?? Infinity
41
36
42
- console . log ( { cacheControl, contentLength, maxEntrySize } )
43
-
44
- console . log ( 'onHeaders if statement match:' )
45
-
46
- console . log (
47
- contentLength < maxEntrySize &&
48
- cacheControl &&
49
- cacheControl . public &&
50
- ! cacheControl . private &&
51
- ! cacheControl [ 'no-store' ] &&
52
- ! cacheControl [ 'no-cache' ] &&
53
- ! cacheControl [ 'must-understand' ] &&
54
- ! cacheControl [ 'must-revalidate' ] &&
55
- ! cacheControl [ 'proxy-revalidate' ] ,
56
- )
57
-
58
37
if (
59
38
contentLength < maxEntrySize &&
60
39
cacheControl &&
@@ -73,8 +52,6 @@ class CacheHandler extends DecoratorHandler {
73
52
? 31556952 // 1 year
74
53
: Number ( maxAge )
75
54
76
- console . log ( { ttl, maxAge, cacheControl, contentLength, maxEntrySize } )
77
-
78
55
if ( ttl > 0 ) {
79
56
this . #value = {
80
57
data : {
@@ -88,16 +65,10 @@ class CacheHandler extends DecoratorHandler {
88
65
( rawHeaders ?. reduce ( ( xs , x ) => xs + x . length , 0 ) ?? 0 ) +
89
66
( statusMessage ?. length ?? 0 ) +
90
67
64 ,
91
- ttl : ttl * 1e3 ,
68
+ expires : Date . now ( ) + ttl ,
92
69
}
93
70
}
94
-
95
- console . log ( { thisvalue : this . #value } )
96
71
}
97
-
98
- console . log ( 'onHeaders, finish:' )
99
- console . log ( { statusCode, rawHeaders, resume, statusMessage, headers } )
100
-
101
72
return this . #handler. onHeaders ( statusCode , rawHeaders , resume , statusMessage , headers )
102
73
}
103
74
@@ -116,34 +87,25 @@ class CacheHandler extends DecoratorHandler {
116
87
}
117
88
118
89
onComplete ( rawTrailers ) {
119
- console . log ( 'onComplete this:' )
120
- console . log ( { thisvalue : this . #value } )
121
- console . log ( { thisstore : this . #store } ) // CacheStore{}
122
- console . log ( { thishandler : this . #handler } ) // RequestHandler{}
123
- console . log ( { thishandlervalue : this . #handler. value } )
124
- console . log ( { this : this } )
125
90
if ( this . #value) {
126
- this . #value. data . rawTrailers = rawTrailers
127
- this . #value. size += rawTrailers ?. reduce ( ( xs , x ) => xs + x . length , 0 ) ?? 0
128
-
129
- const opts = this . #handler. opts
130
- const entries = this . #handler. entries
131
- console . log ( 'onComplete this:' )
132
- console . log ( { opts, entries } )
133
-
134
- const reqHeaders = this . #handler. opts
91
+ const reqHeaders = this . #opts
135
92
const resHeaders = parseHeaders ( this . #value. data . rawHeaders )
136
93
137
- const vary = formatVaryData ( resHeaders , reqHeaders )
138
-
139
- console . log ( { vary } )
94
+ // Early return if Vary = *, uncacheable.
95
+ if ( resHeaders . vary === '*' ) {
96
+ return this . #handler. onComplete ( rawTrailers )
97
+ }
140
98
141
- this . #value. vary = vary
99
+ this . #value. data . rawTrailers = rawTrailers
100
+ this . #value. size = this . #value. size
101
+ ? this . #value. size + rawTrailers ?. reduce ( ( xs , x ) => xs + x . length , 0 )
102
+ : 0
142
103
143
- console . log ( { entries } )
104
+ this . #value . vary = formatVaryData ( resHeaders , reqHeaders )
144
105
145
- this . #store. set ( this . #key, entries . push ( this . #value) )
106
+ this . #store. set ( this . #key, this . #value)
146
107
}
108
+
147
109
return this . #handler. onComplete ( rawTrailers )
148
110
}
149
111
}
/**
 * Build the Vary data stored alongside a cached response: one
 * [headerName, requestValue] pair for every header named by the
 * response's `Vary` header, dropping headers the request did not send.
 *
 * Values are read from `reqHeaders.headers` so that stored pairs line up
 * with how `findEntryByHeaders` matches them on lookup (it compares
 * against `reqHeaders?.headers[key]`); the call site passes the request
 * options object, whose header values live under `.headers`.
 *
 * @param {object} resHeaders - Parsed response headers.
 * @param {object} reqHeaders - Request options (with a `headers` map).
 * @returns {Array<[string, string]>|undefined} Pairs, or undefined when
 *   the response has no `Vary` header.
 */
function formatVaryData (resHeaders, reqHeaders) {
  return resHeaders.vary
    ?.split(',')
    .map((key) => key.trim().toLowerCase())
    .map((key) => [key, reqHeaders?.headers?.[key] ?? ''])
    .filter(([, val]) => val)
}
157
120
158
/**
 * SQLite-backed cache store for responses.
 *
 * Each row holds the serialized response (`data`), the Vary pairs
 * (`vary`), the approximate entry size in bytes (`size`) and an absolute
 * expiry timestamp in milliseconds (`expires`).
 *
 * `data` contains Buffers (rawHeaders/rawTrailers), so it is serialized
 * with buffer-json; `vary` is plain JSON. (Previously the two
 * serializers were swapped between set() and get(), corrupting every
 * round-trip.)
 */
export class CacheStore {
  #database

  #insertQuery
  #getQuery
  #purgeQuery

  #size = 0
  #maxSize = 128e9

  /**
   * @param {string} [location=':memory:'] - SQLite database path.
   * @param {object} [opts]
   * @param {number} [opts.maxSize] - Soft byte limit; exceeding it purges expired rows.
   */
  constructor (location = ':memory:', opts) {
    // TODO (fix): Validate args...

    // Optional-chain: `new CacheStore()` (no opts) must not throw.
    this.#maxSize = opts?.maxSize ?? this.#maxSize
    this.#database = new DatabaseSync(location)

    this.#database.exec(`
      CREATE TABLE IF NOT EXISTS cacheInterceptor(
        key TEXT,
        data TEXT,
        vary TEXT,
        size INTEGER,
        expires INTEGER
      ) STRICT
    `)

    this.#insertQuery = this.#database.prepare(
      'INSERT INTO cacheInterceptor (key, data, vary, size, expires) VALUES (?, ?, ?, ?, ?)',
    )

    this.#getQuery = this.#database.prepare(
      'SELECT * FROM cacheInterceptor WHERE key = ? AND expires > ?',
    )

    this.#purgeQuery = this.#database.prepare('DELETE FROM cacheInterceptor WHERE expires < ?')

    this.#maybePurge()
  }

  /**
   * Persist one cache entry under `key`.
   *
   * @param {string} key - "METHOD:path" cache key.
   * @param {{ data: object, vary: Array, size: number, expires: number }} value
   */
  set (key, { data, vary, size, expires }) {
    // Buffer-aware serializer for `data`, plain JSON for `vary` —
    // mirrored exactly by get() below.
    this.#insertQuery.run(key, BJSON.stringify(data), JSON.stringify(vary), size, expires)

    this.#size += size
    this.#maybePurge()
  }

  /**
   * Return all unexpired entries stored under `key`.
   *
   * @param {string} key
   * @returns {Array<{ data: object, vary: Array, size: number, expires: number }>}
   */
  get (key) {
    return this.#getQuery.all(key, Date.now()).map(({ data, vary, size, expires }) => ({
      data: BJSON.parse(data),
      vary: JSON.parse(vary),
      size: Number(size),
      expires: Number(expires), // was misspelled `expores`, breaking expiry reads
    }))
  }

  /** Close the underlying database handle. */
  close () {
    this.#database.close()
  }

  // Purge expired rows once the tracked size exceeds the limit (or is
  // unknown), then recompute the tracked size from the table.
  #maybePurge () {
    if (this.#size == null || this.#size > this.#maxSize) {
      this.#purgeQuery.run(Date.now())
      // DatabaseSync.exec() returns no rows — a prepared statement with
      // .get() is required to read the aggregate back.
      const row = this.#database
        .prepare('SELECT SUM(size) AS total FROM cacheInterceptor')
        .get()
      this.#size = row?.total ?? 0
    }
  }
}
189
187
188
/**
 * Pick the first cached entry whose stored Vary pairs all match the
 * incoming request's headers. Entries without Vary data match any
 * request.
 *
 * @param {Array|undefined} entries - Candidate cache entries.
 * @param {object|undefined} reqHeaders - Request options (with a `headers` map).
 * @returns {object|undefined} The matching entry, if any.
 */
function findEntryByHeaders (entries, reqHeaders) {
  if (!entries) {
    return undefined
  }

  const matchesRequest = (entry) => {
    if (!entry.vary) {
      return true
    }
    return entry.vary.every(([name, expected]) => reqHeaders?.headers[name] === expected)
  }

  return entries.find(matchesRequest)
}
199
193
200
- const DEFAULT_CACHE_STORE = new CacheStore ( { maxSize : 128 * 1024 , maxEntrySize : 1024 } )
194
+ const DEFAULT_CACHE_STORE = new CacheStore ( )
201
195
202
196
export default ( opts ) => ( dispatch ) => ( opts , handler ) => {
203
- console . log ( 'cache dispatcher:' )
204
- console . log ( dispatch )
205
- console . log ( 'opts:' )
206
- console . log ( opts )
207
- console . log ( 'handler:' )
208
- console . log ( handler )
209
-
210
197
if ( ! opts . cache || opts . upgrade ) {
211
198
return dispatch ( opts , handler )
212
199
}
@@ -235,85 +222,27 @@ export default (opts) => (dispatch) => (opts, handler) => {
235
222
// Dump body...
236
223
opts . body ?. on ( 'error' , ( ) => { } ) . resume ( )
237
224
225
+ opts . host = opts . host ?? new URL ( opts . origin ) . host
226
+
227
+ if ( ! opts . headers ) {
228
+ opts . headers = { }
229
+ }
230
+
231
+ // idea: use DEFAULT_CACHE_STORE by default if 'cache' not specified, since the cache interceptor was already specified to be used.
238
232
const store = opts . cache === true ? DEFAULT_CACHE_STORE : opts . cache
239
233
240
234
if ( ! store ) {
241
235
throw new Error ( `Cache store not provided.` )
242
236
}
243
237
244
- let key = `${ opts . method } :${ opts . path } `
245
- console . log ( 'getting key: ' + key )
246
- let entries = store . get ( key )
238
+ const key = `${ opts . method } :${ opts . path } `
247
239
248
- if ( Array . isArray ( entries ) && entries . length === 0 && opts . method === 'HEAD' ) {
249
- key = `GET:${ opts . path } `
250
- entries = store . get ( key )
251
- }
240
+ const entries = store . get ( key ) ?? ( opts . method === 'HEAD' ? store . get ( `GET:${ opts . path } ` ) : null )
252
241
253
- // testing
254
- const rawHeaders = [
255
- Buffer . from ( 'Content-Type' ) ,
256
- Buffer . from ( 'application/json' ) ,
257
- Buffer . from ( 'Content-Length' ) ,
258
- Buffer . from ( '10' ) ,
259
- Buffer . from ( 'Cache-Control' ) ,
260
- Buffer . from ( 'public' ) ,
261
- ]
262
- // // cannot get the cache to work inside the test, so I hardcode the entries here
263
- entries = [
264
- {
265
- statusCode : 200 ,
266
- statusMessage : '' ,
267
- rawHeaders,
268
- rawTrailers : [ 'Hello' ] ,
269
- body : [ 'asd1' ] ,
270
- vary : [
271
- [ 'Accept' , 'application/xml' ] ,
272
- [ 'User-Agent' , 'Mozilla/5.0' ] ,
273
- ] ,
274
- } ,
275
- {
276
- statusCode : 200 ,
277
- statusMessage : '' ,
278
- rawHeaders,
279
- rawTrailers : [ 'Hello' ] ,
280
- body : [ 'asd2' ] ,
281
- vary : [
282
- [ 'Accept' , 'application/txt' ] ,
283
- [ 'User-Agent' , 'Chrome' ] ,
284
- [ 'origin2' , 'www.google.com/images' ] ,
285
- ] ,
286
- } ,
287
- // {
288
- // statusCode: 200, statusMessage: 'last', rawHeaders, rawTrailers: ['Hello'], body: ['asd3'],
289
- // vary: null },
290
- {
291
- statusCode : 200 ,
292
- statusMessage : 'first' ,
293
- rawHeaders,
294
- rawTrailers : [ 'Hello' ] ,
295
- body : [ 'asd4' ] ,
296
- vary : [
297
- [ 'Accept' , 'application/json' ] ,
298
- [ 'User-Agent' , 'Mozilla/5.0' ] ,
299
- [ 'host2' , 'www.google.com' ] ,
300
- [ 'origin2' , 'www.google.com/images' ] ,
301
- ] ,
302
- } ,
303
- ]
304
-
305
- // *testing
306
-
307
- // Find an entry that matches the request, if any
308
242
const entry = findEntryByHeaders ( entries , opts )
309
243
310
- console . log ( 'Entry found:' )
311
- console . log ( { entry } )
312
-
313
- // handler.value.vary = 'foobar'
314
-
315
244
if ( entry ) {
316
- const { statusCode, statusMessage, rawHeaders, rawTrailers, body } = entry
245
+ const { statusCode, statusMessage, rawHeaders, rawTrailers, body } = entry . data
317
246
const ac = new AbortController ( )
318
247
const signal = ac . signal
319
248
@@ -325,11 +254,14 @@ export default (opts) => (dispatch) => (opts, handler) => {
325
254
try {
326
255
handler . onConnect ( abort )
327
256
signal . throwIfAborted ( )
257
+
328
258
handler . onHeaders ( statusCode , rawHeaders , resume , statusMessage )
329
259
signal . throwIfAborted ( )
260
+
330
261
if ( opts . method !== 'HEAD' ) {
331
262
for ( const chunk of body ) {
332
263
const ret = handler . onData ( chunk )
264
+
333
265
signal . throwIfAborted ( )
334
266
if ( ret === false ) {
335
267
// TODO (fix): back pressure...
@@ -345,8 +277,6 @@ export default (opts) => (dispatch) => (opts, handler) => {
345
277
346
278
return true
347
279
} else {
348
- // handler.opts = opts
349
- // handler.entries = entries
350
- return dispatch ( opts , new CacheHandler ( { handler, store, key } ) )
280
+ return dispatch ( opts , new CacheHandler ( { handler, store, key, opts } ) )
351
281
}
352
282
}
0 commit comments