mirror of https://github.com/jkjoy/sunpeiwen.git
{"version":3,"file":"index.js","sources":["../src/parse.ts"],"sourcesContent":["/**\n * EventSource/Server-Sent Events parser\n * @see https://html.spec.whatwg.org/multipage/server-sent-events.html\n *\n * Based on code from the {@link https://github.com/EventSource/eventsource | EventSource module},\n * which is licensed under the MIT license. And copyrighted the EventSource GitHub organisation.\n */\nimport type {EventSourceParseCallback, EventSourceParser} from './types.js'\n\n/**\n * Creates a new EventSource parser.\n *\n * @param onParse - Callback to invoke when a new event is parsed, or a new reconnection interval\n * has been sent from the server\n *\n * @returns A new EventSource parser, with `parse` and `reset` methods.\n * @public\n */\nexport function createParser(onParse: EventSourceParseCallback): EventSourceParser {\n // Processing state\n let isFirstChunk: boolean\n let buffer: string\n let startingPosition: number\n let startingFieldLength: number\n\n // Event state\n let eventId: string | undefined\n let eventName: string | undefined\n let data: string\n\n reset()\n return {feed, reset}\n\n function reset(): void {\n isFirstChunk = true\n buffer = ''\n startingPosition = 0\n startingFieldLength = -1\n\n eventId = undefined\n eventName = undefined\n data = ''\n }\n\n function feed(chunk: string): void {\n buffer = buffer ? buffer + chunk : chunk\n\n // Strip any UTF8 byte order mark (BOM) at the start of the stream.\n // Note that we do not strip any non - UTF8 BOM, as eventsource streams are\n // always decoded as UTF8 as per the specification.\n if (isFirstChunk && hasBom(buffer)) {\n buffer = buffer.slice(BOM.length)\n }\n\n isFirstChunk = false\n\n // Set up chunk-specific processing state\n const length = buffer.length\n let position = 0\n let discardTrailingNewline = false\n\n // Read the current buffer byte by byte\n while (position < length) {\n // EventSource allows for carriage return + line feed, which means we\n // need to ignore a linefeed character if the previous character was a\n // carriage return\n // @todo refactor to reduce nesting, consider checking previous byte?\n // @todo but consider multiple chunks etc\n if (discardTrailingNewline) {\n if (buffer[position] === '\\n') {\n ++position\n }\n discardTrailingNewline = false\n }\n\n let lineLength = -1\n let fieldLength = startingFieldLength\n let character: string\n\n for (let index = startingPosition; lineLength < 0 && index < length; ++index) {\n character = buffer[index]\n if (character === ':' && fieldLength < 0) {\n fieldLength = index - position\n } else if (character === '\\r') {\n discardTrailingNewline = true\n lineLength = index - position\n } else if (character === '\\n') {\n lineLength = index - position\n }\n }\n\n if (lineLength < 0) {\n startingPosition = length - position\n startingFieldLength = fieldLength\n break\n } else {\n startingPosition = 0\n startingFieldLength = -1\n }\n\n parseEventStreamLine(buffer, position, fieldLength, lineLength)\n\n position += lineLength + 1\n }\n\n if (position === length) {\n // If we consumed the entire buffer to read the event, reset the buffer\n buffer = ''\n } else if (position > 0) {\n // If there are bytes left to process, set the buffer to the unprocessed\n // portion of the buffer only\n buffer = buffer.slice(position)\n }\n }\n\n function parseEventStreamLine(\n lineBuffer: string,\n index: number,\n fieldLength: number,\n lineLength: number,\n ) {\n if (lineLength === 0) {\n // We reached the last line of this event\n if (data.length > 0) 
{\n onParse({\n type: 'event',\n id: eventId,\n event: eventName || undefined,\n data: data.slice(0, -1), // remove t
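The embedded source is truncated at this point. What follows is a hedged continuation sketch, an assumption based on the WHATWG SSE spec and on the `BOM`/`hasBom` references in `feed` above, not necessarily the verbatim upstream code: the remainder of `parseEventStreamLine` would finish dispatching the buffered event, then split any non-empty line into field and value at the colon found by `feed` and handle the `data`, `event`, `id`, and `retry` fields, with the module-level `BOM` constant and `hasBom` helper defined afterwards. The locals `noValue`, `step`, and `value` are illustrative names.

        // (continuation sketch) close the dispatched event and reset per-event state
        })

        data = ''
        eventId = undefined
      }
      eventName = undefined
      return
    }

    // Non-empty line: split `field: value` at the first colon located by `feed`
    const noValue = fieldLength < 0
    const field = lineBuffer.slice(index, index + (noValue ? lineLength : fieldLength))
    let step = 0

    if (noValue) {
      step = lineLength
    } else if (lineBuffer[index + fieldLength + 1] === ' ') {
      // A single space after the colon belongs to the separator, not the value
      step = fieldLength + 2
    } else {
      step = fieldLength + 1
    }

    const position = index + step
    const valueLength = lineLength - step
    const value = lineBuffer.slice(position, position + valueLength)

    if (field === 'data') {
      data += value ? `${value}\n` : '\n'
    } else if (field === 'event') {
      eventName = value
    } else if (field === 'id' && !value.includes('\u0000')) {
      // The id field is ignored if it contains a NULL character, per the spec
      eventId = value
    } else if (field === 'retry') {
      const retry = parseInt(value, 10)
      if (!Number.isNaN(retry)) {
        onParse({type: 'reconnect-interval', value: retry})
      }
    }
  }
}

// UTF8 byte order mark, as the char codes checked by `hasBom` in `feed` above
const BOM = [239, 187, 191]

function hasBom(buffer: string) {
  return BOM.every((charCode: number, index: number) => buffer.charCodeAt(index) === charCode)
}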
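For orientation, a minimal usage sketch of the API defined above, assuming this is the `eventsource-parser` package. The package name, the fetch/TextDecoder plumbing, and the `reconnect-interval` branch are assumptions for illustration; only `createParser`, `feed`, and `reset` appear in the source above.

import {createParser} from 'eventsource-parser' // assumed package name

// Sketch: decode an SSE response body chunk by chunk and feed it to the parser.
async function logServerSentEvents(url: string): Promise<void> {
  const parser = createParser((event) => {
    if (event.type === 'event') {
      console.log('event:', event.event, 'id:', event.id, 'data:', event.data)
    } else if (event.type === 'reconnect-interval') {
      console.log('server requested a reconnection interval of', event.value, 'ms')
    }
  })

  const response = await fetch(url)
  const reader = response.body!.getReader()
  const decoder = new TextDecoder()

  // Chunk boundaries need not align with lines or events; `feed` buffers any
  // unterminated line internally and resumes parsing on the next chunk.
  while (true) {
    const {done, value} = await reader.read()
    if (done) break
    parser.feed(decoder.decode(value, {stream: true}))
  }

  parser.reset() // clear parser state before reusing it for another stream
}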