feat(bun-compress): Created a compress middleware that supports Bun (#1153)
* feat(bun-compress): Created a compress middleware that supports Bun
* fix(bun-compress): spelling mistake in readme
* fix(bun-compress): package description
* fixed package.json
parent 026e1deac3
commit 7717755dd3
@@ -0,0 +1,5 @@
---
'@hono/bun-compress': minor
---

Created Bun Compress Middleware
@@ -0,0 +1,30 @@
# Bun Compress Middleware for Hono

Bun does not currently support the [CompressionStream API](https://developer.mozilla.org/en-US/docs/Web/API/CompressionStream), so this middleware replicates the behavior of [`hono/compress`](https://hono.dev/docs/middleware/builtin/compress) using the Zlib library. This middleware will be deprecated once [Bun adds support for `CompressionStream`](https://github.com/oven-sh/bun/issues/1723).

When `CompressionStream` is available, this middleware delegates to `hono/compress`, so the same code works in both Bun and Node.js without changes.

## Import

```ts
import { Hono } from 'hono'
import { compress } from '@hono/bun-compress'
```

## Usage

```ts
const app = new Hono()

app.use(compress())
```

## Options

### <Badge type="info" text="optional" /> encoding: `'gzip'` | `'deflate'`

The compression scheme to allow for response compression. Either `gzip` or `deflate`. If not defined, both are allowed and the scheme is chosen based on the `Accept-Encoding` header. `gzip` is prioritized when this option is not provided and the client accepts both.

### <Badge type="info" text="optional" /> threshold: `number`

The minimum size in bytes to compress. Defaults to 1024 bytes.
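For instance, a minimal sketch combining both options (the `/report` route and the 4 KiB threshold are illustrative, not part of the package):

```ts
import { Hono } from 'hono'
import { compress } from '@hono/bun-compress'

const app = new Hono()

// Only offer gzip and skip bodies whose Content-Length is under 4 KiB,
// using the `encoding` and `threshold` options documented above.
app.use(compress({ encoding: 'gzip', threshold: 4096 }))

app.get('/report', (c) => c.text('a'.repeat(8192)))

export default app
```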
@@ -0,0 +1,53 @@
{
  "name": "@hono/bun-compress",
  "version": "0.0.0",
  "description": "A Hono middleware for compressing responses using Bun's built-in compression.",
  "type": "module",
  "module": "dist/index.js",
  "types": "dist/index.d.ts",
  "files": [
    "dist"
  ],
  "scripts": {
    "build": "tsup ./src/index.ts",
    "prepack": "yarn build",
    "publint": "attw --pack && publint",
    "typecheck": "tsc -b tsconfig.json",
    "test": "bun test"
  },
  "exports": {
    ".": {
      "import": {
        "types": "./dist/index.d.ts",
        "default": "./dist/index.js"
      },
      "require": {
        "types": "./dist/index.d.cts",
        "default": "./dist/index.cjs"
      }
    }
  },
  "license": "MIT",
  "publishConfig": {
    "registry": "https://registry.npmjs.org",
    "access": "public"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/honojs/middleware.git",
    "directory": "packages/bun-compress"
  },
  "homepage": "https://github.com/honojs/middleware",
  "peerDependencies": {
    "hono": "*"
  },
  "devDependencies": {
    "@arethetypeswrong/cli": "^0.17.4",
    "@types/bun": "^1.2.12",
    "@types/node": "^22.15.15",
    "publint": "^0.3.9",
    "tsup": "^8.4.0",
    "typescript": "^5.8.2",
    "vitest": "^3.0.8"
  }
}
@@ -0,0 +1,231 @@
import { Hono } from 'hono'
import { stream, streamSSE } from 'hono/streaming'
import { Readable } from 'node:stream'
import type { ReadableStream } from 'node:stream/web'
import { createGunzip } from 'node:zlib'
import { compress } from '.'

describe('Bun Compress Middleware', () => {
  const app = new Hono()

  // Apply compress middleware to all routes
  app.use('*', compress())

  // Test routes
  app.get('/small', (c) => {
    c.header('Content-Type', 'text/plain')
    c.header('Content-Length', '5')
    return c.text('small')
  })
  app.get('/large', (c) => {
    c.header('Content-Type', 'text/plain')
    c.header('Content-Length', '1024')
    return c.text('a'.repeat(1024))
  })
  app.get('/small-json', (c) => {
    c.header('Content-Type', 'application/json')
    c.header('Content-Length', '26')
    return c.json({ message: 'Hello, World!' })
  })
  app.get('/large-json', (c) => {
    c.header('Content-Type', 'application/json')
    c.header('Content-Length', '1024')
    return c.json({ data: 'a'.repeat(1024), message: 'Large JSON' })
  })
  app.get('/no-transform', (c) => {
    c.header('Content-Type', 'text/plain')
    c.header('Content-Length', '1024')
    c.header('Cache-Control', 'no-transform')
    return c.text('a'.repeat(1024))
  })
  app.get('/jpeg-image', (c) => {
    c.header('Content-Type', 'image/jpeg')
    c.header('Content-Length', '1024')
    return c.body(new Uint8Array(1024)) // Simulated JPEG data
  })
  app.get('/already-compressed', (c) => {
    c.header('Content-Type', 'application/octet-stream')
    c.header('Content-Encoding', 'br')
    c.header('Content-Length', '1024')
    return c.body(new Uint8Array(1024)) // Simulated compressed data
  })
  app.get('/transfer-encoding-deflate', (c) => {
    c.header('Content-Type', 'application/octet-stream')
    c.header('Transfer-Encoding', 'deflate')
    c.header('Content-Length', '1024')
    return c.body(new Uint8Array(1024)) // Simulated deflate data
  })
  app.get('/chunked', (c) => {
    c.header('Content-Type', 'application/octet-stream')
    c.header('Transfer-Encoding', 'chunked')
    c.header('Content-Length', '1024')
    return c.body(new Uint8Array(1024)) // Simulated chunked data
  })
  app.get('/stream', (c) =>
    stream(c, async (stream) => {
      c.header('Content-Type', 'text/plain')
      // 60000 bytes
      for (let i = 0; i < 10000; i++) {
        await stream.write('chunk ')
      }
    })
  )
  app.get('/already-compressed-stream', (c) =>
    stream(c, async (stream) => {
      c.header('Content-Type', 'text/plain')
      c.header('Content-Encoding', 'br')
      // 60000 bytes
      for (let i = 0; i < 10000; i++) {
        await stream.write(new Uint8Array([0, 1, 2, 3, 4, 5])) // Simulated compressed data
      }
    })
  )
  app.get('/sse', (c) =>
    streamSSE(c, async (stream) => {
      for (let i = 0; i < 1000; i++) {
        await stream.writeSSE({ data: 'chunk' })
      }
    })
  )
  app.notFound((c) => c.text('Custom NotFound', 404))

  const testCompression = async (
    path: string,
    acceptEncoding: string,
    expectedEncoding: string | null
  ) => {
    const req = new Request(`http://localhost${path}`, {
      method: 'GET',
      headers: new Headers({ 'Accept-Encoding': acceptEncoding }),
    })
    const res = await app.request(req)
    expect(res.headers.get('Content-Encoding')).toBe(expectedEncoding)
    return res
  }

  describe('Compression Behavior', () => {
    it('should compress large responses with gzip', async () => {
      const res = await testCompression('/large', 'gzip', 'gzip')
      expect(res.headers.get('Content-Length')).toBeNull()
      expect((await res.arrayBuffer()).byteLength).toBeLessThan(1024)
    })

    it('should compress large responses with deflate', async () => {
      const res = await testCompression('/large', 'deflate', 'deflate')
      expect((await res.arrayBuffer()).byteLength).toBeLessThan(1024)
    })

    it('should prioritize gzip over deflate when both are accepted', async () => {
      await testCompression('/large', 'gzip, deflate', 'gzip')
    })

    it('should not compress small responses', async () => {
      const res = await testCompression('/small', 'gzip, deflate', null)
      expect(res.headers.get('Content-Length')).toBe('5')
    })

    it('should not compress when no Accept-Encoding is provided', async () => {
      await testCompression('/large', '', null)
    })

    it('should not compress images', async () => {
      const res = await testCompression('/jpeg-image', 'gzip', null)
      expect(res.headers.get('Content-Type')).toBe('image/jpeg')
      expect(res.headers.get('Content-Length')).toBe('1024')
    })

    it('should not compress already compressed responses', async () => {
      const res = await testCompression('/already-compressed', 'gzip', 'br')
      expect(res.headers.get('Content-Length')).toBe('1024')
    })

    it('should remove Content-Length when compressing', async () => {
      const res = await testCompression('/large', 'gzip', 'gzip')
      expect(res.headers.get('Content-Length')).toBeNull()
    })

    it('should not remove Content-Length when not compressing', async () => {
      const res = await testCompression('/jpeg-image', 'gzip', null)
      expect(res.headers.get('Content-Length')).toBeDefined()
    })

    it('should not compress transfer-encoding: deflate', async () => {
      const res = await testCompression('/transfer-encoding-deflate', 'gzip', null)
      expect(res.headers.get('Content-Length')).toBe('1024')
      expect(res.headers.get('Transfer-Encoding')).toBe('deflate')
    })

    it('should not compress transfer-encoding: chunked', async () => {
      const res = await testCompression('/chunked', 'gzip', null)
      expect(res.headers.get('Content-Length')).toBe('1024')
      expect(res.headers.get('Transfer-Encoding')).toBe('chunked')
    })
  })

  describe('JSON Handling', () => {
    it('should not compress small JSON responses', async () => {
      const res = await testCompression('/small-json', 'gzip', null)
      expect(res.headers.get('Content-Length')).toBe('26')
    })

    it('should compress large JSON responses', async () => {
      const res = await testCompression('/large-json', 'gzip', 'gzip')
      expect(res.headers.get('Content-Length')).toBeNull()
      const decompressed = await decompressResponse(res)
      const json = JSON.parse(decompressed)
      expect(json.data.length).toBe(1024)
      expect(json.message).toBe('Large JSON')
    })
  })

  describe('Streaming Responses', () => {
    it('should compress streaming responses written in multiple chunks', async () => {
      const res = await testCompression('/stream', 'gzip', 'gzip')
      const decompressed = await decompressResponse(res)
      expect(decompressed.length).toBe(60000)
    })

    it('should not compress already compressed streaming responses', async () => {
      const res = await testCompression('/already-compressed-stream', 'gzip', 'br')
      expect((await res.arrayBuffer()).byteLength).toBe(60000)
    })

    it('should not compress server-sent events', async () => {
      const res = await testCompression('/sse', 'gzip', null)
      expect((await res.arrayBuffer()).byteLength).toBe(13000)
    })
  })

  describe('Edge Cases', () => {
    it('should not compress responses with Cache-Control: no-transform', async () => {
      await testCompression('/no-transform', 'gzip', null)
    })

    it('should handle HEAD requests without compression', async () => {
      const req = new Request('http://localhost/large', {
        method: 'HEAD',
        headers: new Headers({ 'Accept-Encoding': 'gzip' }),
      })
      const res = await app.request(req)
      expect(res.headers.get('Content-Encoding')).toBeNull()
    })

    it('should compress custom 404 Not Found responses', async () => {
      const res = await testCompression('/not-found', 'gzip', 'gzip')
      expect(res.status).toBe(404)
      const decompressed = await decompressResponse(res)
      expect(decompressed).toBe('Custom NotFound')
    })
  })
})

async function decompressResponse(res: Response): Promise<string> {
  const resBody = res.body as ReadableStream
  const readableStream = Readable.fromWeb(resBody)
  const decompressedStream = readableStream.pipe(createGunzip())
  const decompressedReadableStream = Readable.toWeb(decompressedStream)

  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const decompressedResponse = new Response(decompressedReadableStream as any)
  return await decompressedResponse.text()
}
@@ -0,0 +1,103 @@
/**
 * @module
 * Compress Middleware for Hono.
 */

import type { MiddlewareHandler } from 'hono'
import { compress as originalCompress } from 'hono/compress'
import { COMPRESSIBLE_CONTENT_TYPE_REGEX } from 'hono/utils/compress'
import { Readable } from 'node:stream'
import type { ReadableStream } from 'node:stream/web'
import { createDeflate, createGzip } from 'node:zlib'

const ENCODING_TYPES = ['gzip', 'deflate'] as const
const cacheControlNoTransformRegExp = /(?:^|,)\s*?no-transform\s*?(?:,|$)/i

interface CompressionOptions {
  encoding?: (typeof ENCODING_TYPES)[number]
  threshold?: number
}

/**
 * Compress Middleware for Hono on Bun.
 *
 * Bun does not currently support CompressionStream, so this uses the zlib module to compress the response body.
 *
 * @see {@link https://hono.dev/docs/middleware/builtin/compress}
 * @see {@link https://github.com/oven-sh/bun/issues/1723}
 *
 * @param {CompressionOptions} [options] - The options for the compress middleware.
 * @param {'gzip' | 'deflate'} [options.encoding] - The compression scheme to allow for response compression. Either 'gzip' or 'deflate'. If not defined, both are allowed and will be used based on the Accept-Encoding header. 'gzip' is prioritized if this option is not provided and the client provides both in the Accept-Encoding header.
 * @param {number} [options.threshold=1024] - The minimum size in bytes to compress. Defaults to 1024 bytes.
 * @returns {MiddlewareHandler} The middleware handler function.
 *
 * @example
 * ```ts
 * const app = new Hono()
 *
 * app.use(compress())
 * ```
 */
export const compress = (options?: CompressionOptions): MiddlewareHandler => {
  // Check CompressionStream support
  if (typeof CompressionStream !== 'undefined') {
    return originalCompress(options)
  }

  const threshold = options?.threshold ?? 1024

  return async function compress(ctx, next) {
    await next()

    const contentLength = ctx.res.headers.get('Content-Length')

    // Check if response should be compressed
    if (
      ctx.res.headers.has('Content-Encoding') || // already encoded
      ctx.res.headers.has('Transfer-Encoding') || // already encoded or chunked
      ctx.req.method === 'HEAD' || // HEAD request
      (contentLength && Number(contentLength) < threshold) || // content-length below threshold
      !shouldCompress(ctx.res) || // not compressible type
      !shouldTransform(ctx.res) // cache-control: no-transform
    ) {
      return
    }

    const accepted = ctx.req.header('Accept-Encoding')
    const encoding =
      options?.encoding ?? ENCODING_TYPES.find((encoding) => accepted?.includes(encoding))
    if (!encoding || !ctx.res.body) {
      return
    }

    // Compress the response
    try {
      const compressedStream = encoding === 'gzip' ? createGzip() : createDeflate()

      const readableBody = ctx.res.body as ReadableStream
      const readableStream = Readable.fromWeb(readableBody)
      const compressedBody = readableStream.pipe(compressedStream)
      const compressedReadableStream = Readable.toWeb(compressedBody) as ReadableStream<Uint8Array>

      // Create a new response with the compressed body
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      ctx.res = new Response(compressedReadableStream as any, ctx.res)
      ctx.res.headers.delete('Content-Length')
      ctx.res.headers.set('Content-Encoding', encoding)
    } catch (error) {
      console.error('Compression error:', error)
    }
  }
}

const shouldCompress = (res: Response) => {
  const type = res.headers.get('Content-Type')
  return type && COMPRESSIBLE_CONTENT_TYPE_REGEX.test(type)
}

const shouldTransform = (res: Response) => {
  const cacheControl = res.headers.get('Cache-Control')
  // Don't compress for Cache-Control: no-transform
  // https://tools.ietf.org/html/rfc7234#section-5.2.2.4
  return !cacheControl || !cacheControlNoTransformRegExp.test(cacheControl)
}
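A quick way to observe the middleware end to end, sketched after the `testCompression` helper in the tests above (the `/big` route and payload size are made up for illustration): dispatch a request with Hono's `app.request()` and inspect the `Content-Encoding` header.

```ts
import { Hono } from 'hono'
import { compress } from '@hono/bun-compress'

const app = new Hono()
app.use(compress())
app.get('/big', (c) => {
  c.header('Content-Type', 'text/plain')
  c.header('Content-Length', '2048')
  return c.text('a'.repeat(2048))
})

// No server needed: app.request() feeds a Request straight into the app.
const res = await app.request('http://localhost/big', {
  headers: { 'Accept-Encoding': 'gzip' },
})
console.log(res.headers.get('Content-Encoding')) // expected: 'gzip'
```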
@@ -0,0 +1,20 @@
{
  "extends": "../../tsconfig.base.json",
  "compilerOptions": {
    "rootDir": "src",
    "outDir": "dist",
    "tsBuildInfoFile": "dist/tsconfig.build.tsbuildinfo",
    "emitDeclarationOnly": false,
    "types": [
      "node",
      "bun"
    ]
  },
  "include": [
    "src/**/*.ts"
  ],
  "exclude": [
    "**/*.test.ts"
  ],
  "references": []
}
@@ -0,0 +1,13 @@
{
  "extends": "../../tsconfig.base.json",
  "files": [],
  "include": [],
  "references": [
    {
      "path": "./tsconfig.build.json"
    },
    {
      "path": "./tsconfig.spec.json"
    }
  ]
}
@@ -0,0 +1,13 @@
{
  "extends": "../../tsconfig.base.json",
  "compilerOptions": {
    "outDir": "../../dist/out-tsc/packages/hello",
    "types": ["vitest/globals"]
  },
  "include": ["**/*.test.ts", "vitest.config.ts"],
  "references": [
    {
      "path": "./tsconfig.build.json"
    }
  ]
}
@@ -0,0 +1,7 @@
import { defineProject } from 'vitest/config.js'

export default defineProject({
  test: {
    globals: true,
  },
})
yarn.lock
@@ -1845,6 +1845,22 @@ __metadata:
  languageName: unknown
  linkType: soft

"@hono/bun-compress@workspace:packages/bun-compress":
  version: 0.0.0-use.local
  resolution: "@hono/bun-compress@workspace:packages/bun-compress"
  dependencies:
    "@arethetypeswrong/cli": "npm:^0.17.4"
    "@types/bun": "npm:^1.2.12"
    "@types/node": "npm:^22.15.15"
    publint: "npm:^0.3.9"
    tsup: "npm:^8.4.0"
    typescript: "npm:^5.8.2"
    vitest: "npm:^3.0.8"
  peerDependencies:
    hono: "*"
  languageName: unknown
  linkType: soft

"@hono/bun-transpiler@workspace:packages/bun-transpiler":
  version: 0.0.0-use.local
  resolution: "@hono/bun-transpiler@workspace:packages/bun-transpiler"

@@ -3776,6 +3792,15 @@ __metadata:
  languageName: node
  linkType: hard

"@types/bun@npm:^1.2.12":
  version: 1.2.12
  resolution: "@types/bun@npm:1.2.12"
  dependencies:
    bun-types: "npm:1.2.12"
  checksum: 168578a09e9408792c44363957b45d6f574b684a87cfb1ec350131a868e5001b6ce5006a1503846ff1afb7cd827911c5538ec27ba24cbc287e5f200b748ebe30
  languageName: node
  linkType: hard

"@types/caseless@npm:*":
  version: 0.12.5
  resolution: "@types/caseless@npm:0.12.5"

@@ -3917,6 +3942,15 @@ __metadata:
  languageName: node
  linkType: hard

"@types/node@npm:^22.15.15":
  version: 22.15.15
  resolution: "@types/node@npm:22.15.15"
  dependencies:
    undici-types: "npm:~6.21.0"
  checksum: 3b0c12531c9057ddcbe1cdd869ca6c2f3ea753f1cb3de0a482ca70c3d86f0dbb1d28339aecd8a0c133febda4f4355c012ed9be6ab866297fc23db360fa218dde
  languageName: node
  linkType: hard

"@types/prop-types@npm:*":
  version: 15.7.14
  resolution: "@types/prop-types@npm:15.7.14"

@@ -5001,6 +5035,15 @@ __metadata:
  languageName: node
  linkType: hard

"bun-types@npm:1.2.12":
  version: 1.2.12
  resolution: "bun-types@npm:1.2.12"
  dependencies:
    "@types/node": "npm:*"
  checksum: 9e6f421f82164f39ada25f202969f267609e477286817395420cf2b0f4d9e98d2243ec5e73f564a4448941d6706a53329e2fbdbdac7e2e6a32f6882ec63eddae
  languageName: node
  linkType: hard

"bun-types@npm:1.2.5":
  version: 1.2.5
  resolution: "bun-types@npm:1.2.5"

@@ -14038,6 +14081,13 @@ __metadata:
  languageName: node
  linkType: hard

"undici-types@npm:~6.21.0":
  version: 6.21.0
  resolution: "undici-types@npm:6.21.0"
  checksum: c01ed51829b10aa72fc3ce64b747f8e74ae9b60eafa19a7b46ef624403508a54c526ffab06a14a26b3120d055e1104d7abe7c9017e83ced038ea5cf52f8d5e04
  languageName: node
  linkType: hard

"undici@npm:*":
  version: 7.5.0
  resolution: "undici@npm:7.5.0"