@@ -5,18 +5,8 @@ import matter from 'gray-matter';
 import { s } from 'hastscript';
 import yaml from 'js-yaml';
 import { bundleMDX } from 'mdx-bundler';
-import { createReadStream, createWriteStream, mkdirSync } from 'node:fs';
 import { access, opendir, readFile } from 'node:fs/promises';
 import path from 'node:path';
-// @ts-expect-error ts(2305) -- For some reason "compose" is not recognized in the types
-import { compose, Readable } from 'node:stream';
-import { json } from 'node:stream/consumers';
-import { pipeline } from 'node:stream/promises';
-import {
-  constants as zlibConstants,
-  createBrotliCompress,
-  createBrotliDecompress,
-} from 'node:zlib';
 import { limitFunction } from 'p-limit';
 import rehypeAutolinkHeadings from 'rehype-autolink-headings';
 import rehypePresetMinify from 'rehype-preset-minify';
@@ -60,33 +50,6 @@ const root = process.cwd();
 // Functions which looks like AWS Lambda and we get `EMFILE` errors when trying to open
 // so many files at once.
 const FILE_CONCURRENCY_LIMIT = 200;
-const CACHE_COMPRESS_LEVEL = 4;
-const CACHE_DIR = path.join(root, '.next', 'cache', 'mdx-bundler');
-mkdirSync(CACHE_DIR, { recursive: true });
-
-const md5 = (data: BinaryLike) => createHash('md5').update(data).digest('hex');
-
-async function readCacheFile<T>(file: string): Promise<T> {
-  const reader = createReadStream(file);
-  const decompressor = createBrotliDecompress();
-
-  return (await json(compose(reader, decompressor))) as T;
-}
-
-async function writeCacheFile(file: string, data: string) {
-  await pipeline(
-    Readable.from(data),
-    createBrotliCompress({
-      chunkSize: 32 * 1024,
-      params: {
-        [zlibConstants.BROTLI_PARAM_MODE]: zlibConstants.BROTLI_MODE_TEXT,
-        [zlibConstants.BROTLI_PARAM_QUALITY]: CACHE_COMPRESS_LEVEL,
-        [zlibConstants.BROTLI_PARAM_SIZE_HINT]: data.length,
-      },
-    }),
-    createWriteStream(file)
-  );
-}
 
 function formatSlug(slug: string) {
   return slug.replace(/\.(mdx|md)/, '');
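The two helpers removed in this hunk stored each cache entry as a Brotli-compressed JSON file and read it back through a composed stream. The sketch below is a minimal, standalone reconstruction of that round trip for reference; `writeCompressedJson` and `readCompressedJson` are illustrative names rather than identifiers from this repository, and older `@types/node` releases may still need the `@ts-expect-error` on the `compose` import that the deleted code carried.

```ts
import { createReadStream, createWriteStream } from 'node:fs';
import { Readable, compose } from 'node:stream';
import { json } from 'node:stream/consumers';
import { pipeline } from 'node:stream/promises';
import {
  constants as zlibConstants,
  createBrotliCompress,
  createBrotliDecompress,
} from 'node:zlib';

// Serialize a value as Brotli-compressed JSON on disk.
async function writeCompressedJson(file: string, value: unknown): Promise<void> {
  const data = JSON.stringify(value);
  await pipeline(
    Readable.from(data),
    createBrotliCompress({
      params: {
        [zlibConstants.BROTLI_PARAM_MODE]: zlibConstants.BROTLI_MODE_TEXT,
        // A low quality level keeps compression fast; the removed code used 4.
        [zlibConstants.BROTLI_PARAM_QUALITY]: 4,
        [zlibConstants.BROTLI_PARAM_SIZE_HINT]: data.length,
      },
    }),
    createWriteStream(file)
  );
}

// Read it back: file stream -> Brotli decompressor -> JSON via stream consumers.
async function readCompressedJson<T>(file: string): Promise<T> {
  return (await json(compose(createReadStream(file), createBrotliDecompress()))) as T;
}
```

Quality level 4 (the removed `CACHE_COMPRESS_LEVEL`) trades compression ratio for speed, and `BROTLI_PARAM_SIZE_HINT` tells the encoder the payload length up front so it can size its buffers.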
@@ -523,25 +486,6 @@ export async function getFileBySlug(slug: string): Promise<SlugFile> {
     );
   }
 
-  let cacheKey: string | null = null;
-  let cacheFile: string | null = null;
-
-  if (process.env.CI === '1') {
-    cacheKey = md5(source);
-    cacheFile = path.join(CACHE_DIR, cacheKey);
-
-    try {
-      const cached = await readCacheFile<SlugFile>(cacheFile);
-      return cached;
-    } catch (err) {
-      if (err.code !== 'ENOENT' && err.code !== 'ABORT_ERR') {
-        // If cache is corrupted, ignore and proceed
-        // eslint-disable-next-line no-console
-        console.warn(`Failed to read MDX cache: ${cacheFile}`, err);
-      }
-    }
-  }
-
   process.env.ESBUILD_BINARY_PATH = path.join(
     root,
     'node_modules',
@@ -667,13 +611,6 @@ export async function getFileBySlug(slug: string): Promise<SlugFile> {
     },
   };
 
-  if (cacheFile) {
-    writeCacheFile(cacheFile, JSON.stringify(resultObj)).catch(e => {
-      // eslint-disable-next-line no-console
-      console.warn(`Failed to write MDX cache: ${cacheFile}`, e);
-    });
-  }
-
   return resultObj;
 }
 
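Taken together, the deleted pieces formed a read-through cache inside `getFileBySlug`: on CI (`process.env.CI === '1'`) the raw MDX source was hashed with MD5 to name an entry under `.next/cache/mdx-bundler`, a hit returned the cached `SlugFile`, and a miss fell through to `bundleMDX` and wrote the entry in the background. Below is a hedged sketch of that pattern, using plain JSON files instead of the Brotli streams shown earlier; `getWithCache` and `compute` are hypothetical names, not part of this codebase.

```ts
import { createHash, type BinaryLike } from 'node:crypto';
import { mkdir, readFile, writeFile } from 'node:fs/promises';
import path from 'node:path';

const md5 = (data: BinaryLike) => createHash('md5').update(data).digest('hex');

// Read-through cache keyed by the content hash of `source`: return a cached
// result if one exists, otherwise compute it and persist the entry without
// blocking the caller.
async function getWithCache<T>(
  cacheDir: string,
  source: string,
  compute: () => Promise<T>
): Promise<T> {
  await mkdir(cacheDir, { recursive: true });
  const cacheFile = path.join(cacheDir, md5(source));

  try {
    return JSON.parse(await readFile(cacheFile, 'utf8')) as T;
  } catch (err) {
    // A missing entry is the normal miss path; anything else only merits a warning.
    if ((err as NodeJS.ErrnoException).code !== 'ENOENT') {
      console.warn(`Failed to read cache: ${cacheFile}`, err);
    }
  }

  const result = await compute();
  // Fire-and-forget write: a failed cache write should never fail the build itself.
  writeFile(cacheFile, JSON.stringify(result)).catch(e =>
    console.warn(`Failed to write cache: ${cacheFile}`, e)
  );
  return result;
}
```

Both the read and the write swallow errors on purpose: a missing or corrupt cache entry degrades to a normal build rather than breaking it, matching the warn-and-continue handling in the removed code.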