a07b21151f
* Micro optimization in request proxying * Request NSID parsing optimization * DID document parsing optimization * remove un-necessary call to next() * Allow HandlerPipeThrough to be used with streams * Refactor pipethrough to work with streams * Expose "unicastLookup" DNS lookup and "isUnicastIp" utilities * Use a hardened, HTTP2 compatible, client to perform proxied requests * changeset * tidy * Properly handle compressed streams * tidy * update @types/node * refactor * Improved error management * Expose parseContentEncoding() util * use pipeline from nodejs * Avoid decoding in read-after-write (if possible) * Various fixes * Return Buffer instance from streamToBytes * fixes * Add omit() utility * tidy * lint * typo * Use Buffer instead of ArrayBuffer form pipe through handler result * optimization * tidy * refactor * increase highWaterMark * remove un-necessary type check * Use undici.request where more relevant * Improve soc in fetch utils * feedback * fidy * tidy * test refactor * safer fetch * changeset * expose and re-use extractUrl util * small optimizations * tidy * optimization * build branch --------- Co-authored-by: dholms <dtholmgren@gmail.com>
93 lines
2.7 KiB
TypeScript
import {
|
|
asRequest,
|
|
DEFAULT_FORBIDDEN_DOMAIN_NAMES,
|
|
Fetch,
|
|
fetchMaxSizeProcessor,
|
|
forbiddenDomainNameRequestTransform,
|
|
protocolCheckRequestTransform,
|
|
redirectCheckRequestTransform,
|
|
requireHostHeaderTransform,
|
|
timedFetch,
|
|
toRequestTransformer,
|
|
} from '@atproto-labs/fetch'
|
|
import { pipe } from '@atproto-labs/pipe'
|
|
|
|
import { unicastFetchWrap } from './unicast.js'
|
|
|
|
/**
 * Options accepted by {@link safeFetchWrap}. Derived from the function's
 * (destructured) first parameter so the option type can never drift out of
 * sync with the implementation's defaults.
 */
export type SafeFetchWrapOptions = NonNullable<
  Parameters<typeof safeFetchWrap>[0]
>
|
/**
|
|
* Wrap a fetch function with safety checks so that it can be safely used
|
|
* with user provided input (URL).
|
|
*
|
|
* @see {@link https://cheatsheetseries.owasp.org/cheatsheets/Server_Side_Request_Forgery_Prevention_Cheat_Sheet.html}
|
|
*/
|
|
export function safeFetchWrap({
|
|
fetch = globalThis.fetch as Fetch,
|
|
responseMaxSize = 512 * 1024, // 512kB
|
|
ssrfProtection = true,
|
|
allowCustomPort = !ssrfProtection,
|
|
allowData = false,
|
|
allowHttp = !ssrfProtection,
|
|
allowIpHost = true,
|
|
allowPrivateIps = !ssrfProtection,
|
|
timeout = 10e3,
|
|
forbiddenDomainNames = DEFAULT_FORBIDDEN_DOMAIN_NAMES as Iterable<string>,
|
|
} = {}): Fetch<unknown> {
|
|
return toRequestTransformer(
|
|
pipe(
|
|
/**
|
|
* Disable HTTP redirects
|
|
*/
|
|
redirectCheckRequestTransform(),
|
|
|
|
/**
|
|
* Only requests that will be issued with a "Host" header are allowed.
|
|
*/
|
|
allowIpHost ? asRequest : requireHostHeaderTransform(),
|
|
|
|
/**
|
|
* Prevent using http:, file: or data: protocols.
|
|
*/
|
|
protocolCheckRequestTransform({
|
|
'about:': false,
|
|
'data:': allowData,
|
|
'file:': false,
|
|
'http:': allowHttp && { allowCustomPort },
|
|
'https:': { allowCustomPort },
|
|
}),
|
|
|
|
/**
|
|
* Disallow fetching from domains we know are not atproto/OIDC client
|
|
* implementation. Note that other domains can be blocked by providing a
|
|
* custom fetch function combined with another
|
|
* forbiddenDomainNameRequestTransform.
|
|
*/
|
|
forbiddenDomainNameRequestTransform(forbiddenDomainNames),
|
|
|
|
/**
|
|
* Since we will be fetching from the network based on user provided
|
|
* input, let's mitigate resource exhaustion attacks by setting a timeout.
|
|
*/
|
|
timedFetch(
|
|
timeout,
|
|
|
|
/**
|
|
* Since we will be fetching from the network based on user provided
|
|
* input, we need to make sure that the request is not vulnerable to SSRF
|
|
* attacks.
|
|
*/
|
|
allowPrivateIps ? fetch : unicastFetchWrap({ fetch }),
|
|
),
|
|
|
|
/**
|
|
* Since we will be fetching user owned data, we need to make sure that an
|
|
* attacker cannot force us to download a large amounts of data.
|
|
*/
|
|
fetchMaxSizeProcessor(responseMaxSize),
|
|
),
|
|
)
|
|
}
|