Mirror of https://github.com/JonasunderscoreJones/api.jonasjones.dev.git, synced 2025-10-23 20:19:19 +02:00
Initial commit (by create-cloudflare CLI)
Commit 58a42872a0
1745 changed files with 741893 additions and 0 deletions
node_modules/get-source/get-source.js (176 lines, generated, vendored, normal file)
@@ -0,0 +1,176 @@
"use strict";

/* ------------------------------------------------------------------------ */

const { assign } = Object,

      isBrowser  = (typeof window !== 'undefined') && (window.window === window) && window.navigator,

      SourceMapConsumer = require ('source-map').SourceMapConsumer,
      SyncPromise       = require ('./impl/SyncPromise'),
      path              = require ('./impl/path'),
      dataURIToBuffer   = require ('data-uri-to-buffer'),
      nodeRequire       = isBrowser ? null : module.require

/* ------------------------------------------------------------------------ */

const memoize = f => {

    const m = x => (x in m.cache) ? m.cache[x] : (m.cache[x] = f(x))
    m.forgetEverything = () => { m.cache = Object.create (null) }
    m.cache = Object.create (null)

    return m
}

function impl (fetchFile, sync) {

    const PromiseImpl = sync ? SyncPromise : Promise
    const SourceFileMemoized = memoize (path => SourceFile (path, fetchFile (path)))

    function SourceFile (srcPath, text) {
        if (text === undefined) return SourceFileMemoized (path.resolve (srcPath))

        return PromiseImpl.resolve (text).then (text => {

            let file
            let lines
            let resolver
            let _resolve = loc => (resolver = resolver || SourceMapResolverFromFetchedFile (file)) (loc)

            return (file = {
                path: srcPath,
                text,
                get lines () { return lines = (lines || text.split ('\n')) },
                resolve (loc) {
                    const result = _resolve (loc)
                    if (sync) {
                        try { return SyncPromise.valueFrom (result) }
                        catch (e) { return assign ({}, loc, { error: e }) }
                    } else {
                        return Promise.resolve (result)
                    }
                },
                _resolve,
            })
        })
    }

    function SourceMapResolverFromFetchedFile (file) {

        /* Extract the last sourceMap occurence (TODO: support multiple sourcemaps) */

        const re = /\u0023 sourceMappingURL=(.+)\n?/g
        let lastMatch = undefined

        while (true) {
            const match = re.exec (file.text)
            if (match) lastMatch = match
            else break
        }

        const url = lastMatch && lastMatch[1]

        const defaultResolver = loc => assign ({}, loc, {
            sourceFile: file,
            sourceLine: (file.lines[loc.line - 1] || '')
        })

        return url ? SourceMapResolver (file.path, url, defaultResolver)
                   : defaultResolver
    }

    function SourceMapResolver (originalFilePath, sourceMapPath, fallbackResolve) {

        const srcFile = sourceMapPath.startsWith ('data:')
                            ? SourceFile (originalFilePath, dataURIToBuffer (sourceMapPath).toString ())
                            : SourceFile (path.relativeToFile (originalFilePath, sourceMapPath))

        const parsedMap = srcFile.then (f => SourceMapConsumer (JSON.parse (f.text)))

        const sourceFor = memoize (function sourceFor (filePath) {
            return srcFile.then (f => {
                const fullPath = path.relativeToFile (f.path, filePath)
                return parsedMap.then (x => SourceFile (
                    fullPath,
                    x.sourceContentFor (filePath, true /* return null on missing */) || undefined))
            })
        })

        return loc => parsedMap.then (x => {
            const originalLoc = x.originalPositionFor (loc)
            return originalLoc.source ? sourceFor (originalLoc.source).then (x =>
                x._resolve (assign ({}, loc, {
                    line: originalLoc.line,
                    column: originalLoc.column + 1,
                    name: originalLoc.name
                }))
            )
            : fallbackResolve (loc)
        }).catch (e =>
            assign (fallbackResolve (loc), { sourceMapError: e }))
    }

    return assign (function getSource (path) {
        const file = SourceFile (path)
        if (sync) {
            try { return SyncPromise.valueFrom (file) }
            catch (e) {
                const noFile = {
                    path,
                    text: '',
                    lines: [],
                    error: e,
                    resolve (loc) {
                        return assign ({}, loc, { error: e, sourceLine: '', sourceFile: noFile })
                    }
                }
                return noFile
            }
        }
        return file
    }, {
        resetCache: () => SourceFileMemoized.forgetEverything (),
        getCache: () => SourceFileMemoized.cache
    })
}

/* ------------------------------------------------------------------------ */

module.exports = impl (function fetchFileSync (path) {
    return new SyncPromise (resolve => {
        if (isBrowser) {
            let xhr = new XMLHttpRequest ()
            xhr.open ('GET', path, false /* SYNCHRONOUS XHR FTW :) */)
            xhr.send (null)
            resolve (xhr.responseText)
        } else {
            resolve (nodeRequire ('fs').readFileSync (path, { encoding: 'utf8' }))
        }
    })
}, true)

/* ------------------------------------------------------------------------ */

module.exports.async = impl (function fetchFileAsync (path) {
    return new Promise ((resolve, reject) => {
        if (isBrowser) {
            let xhr = new XMLHttpRequest ()
            xhr.open ('GET', path)
            xhr.onreadystatechange = event => {
                if (xhr.readyState === 4) {
                    if (xhr.status === 200) {
                        resolve (xhr.responseText)
                    } else {
                        reject (new Error (xhr.statusText))
                    }
                }
            }
            xhr.send (null)
        } else {
            nodeRequire ('fs').readFile (path, { encoding: 'utf8' }, (e, x) => {
                e ? reject (e) : resolve (x)
            })
        }
    })
})

/* ------------------------------------------------------------------------ */
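
For reference, a minimal usage sketch of the API this vendored module exports, read off the code above. It is not part of the committed file; the file name './dist/bundle.min.js' and the line/column numbers are hypothetical, and it assumes the bundle ends with a '//# sourceMappingURL=...' comment so a source map can be found.

// Usage sketch (hypothetical input file; assumes get-source is installed as a dependency).
const getSource = require ('get-source')

// Synchronous API (the default export): fetches the file and memoizes it by resolved path.
const file = getSource ('./dist/bundle.min.js')
const loc  = file.resolve ({ line: 1, column: 120 })
console.log (loc.sourceFile.path, loc.line, loc.column, loc.sourceLine)

// Asynchronous API: same shape, but getSource.async () and resolve () return Promises.
getSource.async ('./dist/bundle.min.js')
    .then (f   => f.resolve ({ line: 1, column: 120 }))
    .then (loc => console.log (loc.sourceLine))

// Both variants expose the memoized file cache.
getSource.resetCache ()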