diff --git a/package.json b/package.json
index a85f222..5432777 100755
--- a/package.json
+++ b/package.json
@@ -18,7 +18,7 @@
     "build": "rollup -c rollup.config.js",
     "test": "jest",
     "flow": "flow check",
-    "format": "prettier --write src/**/*.js",
+    "format": "prettier --write 'src/**/*.js'",
     "prepublishOnly": "run-p flow build"
   },
   "husky": {
diff --git a/src/defaultProps.js b/src/defaultProps.js
index 01899da..6c43060 100755
--- a/src/defaultProps.js
+++ b/src/defaultProps.js
@@ -1,14 +1,14 @@
-// @flow
-
-import Prism from "./vendor/prism";
-import theme from "./themes/duotoneDark";
-
-import type { PrismLib } from "./types";
-
-const defaultProps = {
-  // $FlowFixMe
-  Prism: (Prism: PrismLib),
-  theme
-};
-
-export default defaultProps;
+// @flow
+
+import Prism from "./vendor/prism";
+import theme from "./themes/duotoneDark";
+
+import type { PrismLib } from "./types";
+
+const defaultProps = {
+  // $FlowFixMe
+  Prism: (Prism: PrismLib),
+  theme,
+};
+
+export default defaultProps;
diff --git a/src/index.js b/src/index.js
index 8efbd4f..799271f 100755
--- a/src/index.js
+++ b/src/index.js
@@ -1,12 +1,9 @@
-// @flow
-
-import Prism from './vendor/prism'
-import defaultProps from './defaultProps'
-import Highlight from './components/Highlight'
-
-export {
-  Prism,
-  defaultProps
-}
-
-export default Highlight
+// @flow
+
+import Prism from "./vendor/prism";
+import defaultProps from "./defaultProps";
+import Highlight from "./components/Highlight";
+
+export { Prism, defaultProps };
+
+export default Highlight;
diff --git a/src/types.js b/src/types.js
index a335264..21c792b 100755
--- a/src/types.js
+++ b/src/types.js
@@ -1,112 +1,116 @@
-// @flow
-
-import type { Key } from "react";
-import includedLangs from "./vendor/prism/includeLangs";
-
-export type Language = $Keys<typeof includedLangs>;
-
-type PrismGrammar = {
-  [key: string]: mixed
-};
-
-type LanguagesDict = {
-  [lang: Language]: PrismGrammar
-};
-
-export type PrismToken = {
-  type: string | string[],
-  alias: string | string[],
-  content: Array<PrismToken | string> | string
-};
-
-export type Token = {
-  types: string[],
-  content: string,
-  empty?: boolean
-};
-
-export type PrismLib = {
-  languages: LanguagesDict,
-  tokenize: (
-    code: string,
-    grammar: PrismGrammar,
-    language: Language
-  ) => Array<PrismToken | string>,
-  highlight: (code: string, grammar: PrismGrammar, language: Language) => string
-};
-
-export type StyleObj = {
-  [key: string]: string | number | null
-};
-
-export type LineInputProps = {
-  key?: Key,
-  style?: StyleObj,
-  className?: string,
-  line: Token[],
-  [key: string]: mixed
-};
-
-export type LineOutputProps = {
-  key?: Key,
-  style?: StyleObj,
-  className: string,
-  [key: string]: mixed
-};
-
-export type TokenInputProps = {
-  key?: Key,
-  style?: StyleObj,
-  className?: string,
-  token: Token,
-  [key: string]: mixed
-};
-
-export type TokenOutputProps = {
-  key?: Key,
-  style?: StyleObj,
-  className: string,
-  children: string,
-  [key: string]: mixed
-};
-
-export type RenderProps = {
-  tokens: Token[][],
-  className: string,
-  getLineProps: (input: LineInputProps) => LineOutputProps,
-  getTokenProps: (input: TokenInputProps) => TokenOutputProps
-};
-
-export type PrismThemeEntry = {
-  color?: string,
-  backgroundColor?: string,
-  fontStyle?: "normal" | "italic",
-  fontWeight?:
-    | "normal"
-    | "bold"
-    | "100"
-    | "200"
-    | "300"
-    | "400"
-    | "500"
-    | "600"
-    | "700"
-    | "800"
-    | "900",
-  textDecorationLine?:
-    | "none"
-    | "underline"
-    | "line-through"
-    | "underline line-through",
-  opacity?: number,
-  [styleKey: string]: string | number | void
-};
-
-export type PrismTheme = {
-  plain: PrismThemeEntry,
-  styles: Array<{
-    types: string[],
-    style: PrismThemeEntry,
-    languages?: Language[]
-  }>
-};
+// @flow
+
+import type { Key } from "react";
+import includedLangs from "./vendor/prism/includeLangs";
+
+export type Language = $Keys<typeof includedLangs>;
+
+type PrismGrammar = {
+  [key: string]: mixed,
+};
+
+type LanguagesDict = {
+  [lang: Language]: PrismGrammar,
+};
+
+export type PrismToken = {
+  type: string | string[],
+  alias: string | string[],
+  content: Array<PrismToken | string> | string,
+};
+
+export type Token = {
+  types: string[],
+  content: string,
+  empty?: boolean,
+};
+
+export type PrismLib = {
+  languages: LanguagesDict,
+  tokenize: (
+    code: string,
+    grammar: PrismGrammar,
+    language: Language
+  ) => Array<PrismToken | string>,
+  highlight: (
+    code: string,
+    grammar: PrismGrammar,
+    language: Language
+  ) => string,
+};
+
+export type StyleObj = {
+  [key: string]: string | number | null,
+};
+
+export type LineInputProps = {
+  key?: Key,
+  style?: StyleObj,
+  className?: string,
+  line: Token[],
+  [key: string]: mixed,
+};
+
+export type LineOutputProps = {
+  key?: Key,
+  style?: StyleObj,
+  className: string,
+  [key: string]: mixed,
+};
+
+export type TokenInputProps = {
+  key?: Key,
+  style?: StyleObj,
+  className?: string,
+  token: Token,
+  [key: string]: mixed,
+};
+
+export type TokenOutputProps = {
+  key?: Key,
+  style?: StyleObj,
+  className: string,
+  children: string,
+  [key: string]: mixed,
+};
+
+export type RenderProps = {
+  tokens: Token[][],
+  className: string,
+  getLineProps: (input: LineInputProps) => LineOutputProps,
+  getTokenProps: (input: TokenInputProps) => TokenOutputProps,
+};
+
+export type PrismThemeEntry = {
+  color?: string,
+  backgroundColor?: string,
+  fontStyle?: "normal" | "italic",
+  fontWeight?:
+    | "normal"
+    | "bold"
+    | "100"
+    | "200"
+    | "300"
+    | "400"
+    | "500"
+    | "600"
+    | "700"
+    | "800"
+    | "900",
+  textDecorationLine?:
+    | "none"
+    | "underline"
+    | "line-through"
+    | "underline line-through",
+  opacity?: number,
+  [styleKey: string]: string | number | void,
+};
+
+export type PrismTheme = {
+  plain: PrismThemeEntry,
+  styles: Array<{
+    types: string[],
+    style: PrismThemeEntry,
+    languages?: Language[],
+  }>,
+};
diff --git a/src/utils/__tests__/normalizeTokens.test.js b/src/utils/__tests__/normalizeTokens.test.js
index 4d8500b..cf5a09f 100755
--- a/src/utils/__tests__/normalizeTokens.test.js
+++ b/src/utils/__tests__/normalizeTokens.test.js
@@ -1,200 +1,200 @@
-import normalizeTokens from "../normalizeTokens";
-
-describe("normalizeTokens", () => {
-  it("handles plain strings", () => {
-    const input = ["hello", "world"];
-    const output = normalizeTokens(input);
-
-    expect(output).toEqual([
-      [
-        { types: ["plain"], content: "hello" },
-        { types: ["plain"], content: "world" }
-      ]
-    ]);
-  });
-
-  it("handles flat tokens", () => {
-    const input = [
-      { type: "test1", content: "hello" },
-      { type: "test2", content: "world" }
-    ];
-    const output = normalizeTokens(input);
-
-    expect(output).toEqual([
-      [
-        { types: ["test1"], content: "hello" },
-        { types: ["test2"], content: "world" }
-      ]
-    ]);
-  });
-
-  it("handles nested tokens", () => {
-    const input = [
-      {
-        type: "test1",
-        content: [
-          { type: "nest1", content: "he" },
-          { type: "nest2", content: "llo" }
-        ]
-      },
-      { type: "test2", content: "world" }
-    ];
-    const output = normalizeTokens(input);
-
-    expect(output).toEqual([
-      [
-        { types: ["test1", "nest1"], content: "he" },
-        { types: ["test1", "nest2"], content: "llo" },
-        { types: ["test2"], content: "world" }
-      ]
-    ]);
-  });
-
-  it("handles nested & mixed tokens", () => {
-    const input = [
-      {
-        type: "test1",
-        content: [{ type: "nest", content: "he" }, "llo"]
-      },
-      { type: "test2", content: "world" },
-      "!"
-    ];
-    const output = normalizeTokens(input);
-
-    expect(output).toEqual([
-      [
-        { types: ["test1", "nest"], content: "he" },
-        { types: ["test1"], content: "llo" },
-        { types: ["test2"], content: "world" },
-        { types: ["plain"], content: "!" }
-      ]
-    ]);
-  });
-
-  it("handles deeply nested tokens", () => {
-    const input = [
-      {
-        type: "1",
-        content: [
-          {
-            type: "2",
-            content: [{ type: "3", content: "hello" }]
-          }
-        ]
-      }
-    ];
-    const output = normalizeTokens(input);
-
-    expect(output).toEqual([[{ types: ["1", "2", "3"], content: "hello" }]]);
-  });
-
-  it("handles plain strings with newlines", () => {
-    const input = ["hello", " \nworld"];
-    const output = normalizeTokens(input);
-
-    expect(output).toEqual([
-      [
-        { types: ["plain"], content: "hello" },
-        { types: ["plain"], content: " " }
-      ],
-      [{ types: ["plain"], content: "world" }]
-    ]);
-  });
-
-  it("handles flat tokens with newlines", () => {
-    const input = [
-      { type: "test1", content: "hello" },
-      { type: "test2", content: "wor\nld" }
-    ];
-    const output = normalizeTokens(input);
-
-    expect(output).toEqual([
-      [
-        { types: ["test1"], content: "hello" },
-        { types: ["test2"], content: "wor" }
-      ],
-      [{ types: ["test2"], content: "ld" }]
-    ]);
-  });
-
-  it("handles nested tokens with newlines", () => {
-    const input = [
-      {
-        type: "test1",
-        content: [
-          { type: "nest1", content: "he" },
-          { type: "nest2", content: "l\nlo" }
-        ]
-      },
-      { type: "test2", content: "wor\nld" }
-    ];
-    const output = normalizeTokens(input);
-
-    expect(output).toEqual([
-      [
-        { types: ["test1", "nest1"], content: "he" },
-        { types: ["test1", "nest2"], content: "l" }
-      ],
-      [
-        { types: ["test1", "nest2"], content: "lo" },
-        { types: ["test2"], content: "wor" }
-      ],
-      [{ types: ["test2"], content: "ld" }]
-    ]);
-  });
-
-  it("handles nested & mixed tokens with newlines", () => {
-    const input = [
-      {
-        type: "test1",
-        content: [{ type: "nest", content: "h\ne" }, "l\nlo"]
-      },
-      "world\n!"
-    ];
-    const output = normalizeTokens(input);
-
-    expect(output).toEqual([
-      [{ types: ["test1", "nest"], content: "h" }],
-      [
-        { types: ["test1", "nest"], content: "e" },
-        { types: ["test1"], content: "l" }
-      ],
-      [
-        { types: ["test1"], content: "lo" },
-        { types: ["plain"], content: "world" }
-      ],
-      [{ types: ["plain"], content: "!" }]
-    ]);
-  });
-
-  it("handles deeply nested tokens with newlines", () => {
-    const input = [
-      {
-        type: "1",
-        content: [
-          {
-            type: "2",
-            content: [{ type: "3", content: "hel\nlo" }]
-          }
-        ]
-      }
-    ];
-    const output = normalizeTokens(input);
-
-    expect(output).toEqual([
-      [{ types: ["1", "2", "3"], content: "hel" }],
-      [{ types: ["1", "2", "3"], content: "lo" }]
-    ]);
-  });
-
-  it("handles empty lines gracefully", () => {
-    const input = ["\n\n"];
-    const output = normalizeTokens(input);
-
-    expect(output).toEqual([
-      [{ types: ["plain"], content: "\n", empty: true }],
-      [{ types: ["plain"], content: "\n", empty: true }],
-      [{ types: ["plain"], content: "\n", empty: true }]
-    ]);
-  });
-});
+import normalizeTokens from "../normalizeTokens";
+
+describe("normalizeTokens", () => {
+  it("handles plain strings", () => {
+    const input = ["hello", "world"];
+    const output = normalizeTokens(input);
+
+    expect(output).toEqual([
+      [
+        { types: ["plain"], content: "hello" },
+        { types: ["plain"], content: "world" },
+      ],
+    ]);
+  });
+
+  it("handles flat tokens", () => {
+    const input = [
+      { type: "test1", content: "hello" },
+      { type: "test2", content: "world" },
+    ];
+    const output = normalizeTokens(input);
+
+    expect(output).toEqual([
+      [
+        { types: ["test1"], content: "hello" },
+        { types: ["test2"], content: "world" },
+      ],
+    ]);
+  });
+
+  it("handles nested tokens", () => {
+    const input = [
+      {
+        type: "test1",
+        content: [
+          { type: "nest1", content: "he" },
+          { type: "nest2", content: "llo" },
+        ],
+      },
+      { type: "test2", content: "world" },
+    ];
+    const output = normalizeTokens(input);
+
+    expect(output).toEqual([
+      [
+        { types: ["test1", "nest1"], content: "he" },
+        { types: ["test1", "nest2"], content: "llo" },
+        { types: ["test2"], content: "world" },
+      ],
+    ]);
+  });
+
+  it("handles nested & mixed tokens", () => {
+    const input = [
+      {
+        type: "test1",
+        content: [{ type: "nest", content: "he" }, "llo"],
+      },
+      { type: "test2", content: "world" },
+      "!",
+    ];
+    const output = normalizeTokens(input);
+
+    expect(output).toEqual([
+      [
+        { types: ["test1", "nest"], content: "he" },
+        { types: ["test1"], content: "llo" },
+        { types: ["test2"], content: "world" },
+        { types: ["plain"], content: "!" },
+      ],
+    ]);
+  });
+
+  it("handles deeply nested tokens", () => {
+    const input = [
+      {
+        type: "1",
+        content: [
+          {
+            type: "2",
+            content: [{ type: "3", content: "hello" }],
+          },
+        ],
+      },
+    ];
+    const output = normalizeTokens(input);
+
+    expect(output).toEqual([[{ types: ["1", "2", "3"], content: "hello" }]]);
+  });
+
+  it("handles plain strings with newlines", () => {
+    const input = ["hello", " \nworld"];
+    const output = normalizeTokens(input);
+
+    expect(output).toEqual([
+      [
+        { types: ["plain"], content: "hello" },
+        { types: ["plain"], content: " " },
+      ],
+      [{ types: ["plain"], content: "world" }],
+    ]);
+  });
+
+  it("handles flat tokens with newlines", () => {
+    const input = [
+      { type: "test1", content: "hello" },
+      { type: "test2", content: "wor\nld" },
+    ];
+    const output = normalizeTokens(input);
+
+    expect(output).toEqual([
+      [
+        { types: ["test1"], content: "hello" },
+        { types: ["test2"], content: "wor" },
+      ],
+      [{ types: ["test2"], content: "ld" }],
+    ]);
+  });
+
+  it("handles nested tokens with newlines", () => {
+    const input = [
+      {
+        type: "test1",
+        content: [
+          { type: "nest1", content: "he" },
+          { type: "nest2", content: "l\nlo" },
+        ],
+      },
+      { type: "test2", content: "wor\nld" },
+    ];
+    const output = normalizeTokens(input);
+
+    expect(output).toEqual([
+      [
+        { types: ["test1", "nest1"], content: "he" },
+        { types: ["test1", "nest2"], content: "l" },
+      ],
+      [
+        { types: ["test1", "nest2"], content: "lo" },
+        { types: ["test2"], content: "wor" },
+      ],
+      [{ types: ["test2"], content: "ld" }],
+    ]);
+  });
+
+  it("handles nested & mixed tokens with newlines", () => {
+    const input = [
+      {
+        type: "test1",
+        content: [{ type: "nest", content: "h\ne" }, "l\nlo"],
+      },
+      "world\n!",
+    ];
+    const output = normalizeTokens(input);
+
+    expect(output).toEqual([
+      [{ types: ["test1", "nest"], content: "h" }],
+      [
+        { types: ["test1", "nest"], content: "e" },
+        { types: ["test1"], content: "l" },
+      ],
+      [
+        { types: ["test1"], content: "lo" },
+        { types: ["plain"], content: "world" },
+      ],
+      [{ types: ["plain"], content: "!" }],
+    ]);
+  });
+
+  it("handles deeply nested tokens with newlines", () => {
+    const input = [
+      {
+        type: "1",
+        content: [
+          {
+            type: "2",
+            content: [{ type: "3", content: "hel\nlo" }],
+          },
+        ],
+      },
+    ];
+    const output = normalizeTokens(input);
+
+    expect(output).toEqual([
+      [{ types: ["1", "2", "3"], content: "hel" }],
+      [{ types: ["1", "2", "3"], content: "lo" }],
+    ]);
+  });
+
+  it("handles empty lines gracefully", () => {
+    const input = ["\n\n"];
+    const output = normalizeTokens(input);
+
+    expect(output).toEqual([
+      [{ types: ["plain"], content: "\n", empty: true }],
+      [{ types: ["plain"], content: "\n", empty: true }],
+      [{ types: ["plain"], content: "\n", empty: true }],
+    ]);
+  });
+});
diff --git a/src/utils/__tests__/themeToDict.test.js b/src/utils/__tests__/themeToDict.test.js
index 7303c7a..405473f 100755
--- a/src/utils/__tests__/themeToDict.test.js
+++ b/src/utils/__tests__/themeToDict.test.js
@@ -1,73 +1,75 @@
-import themeToDict from "../themeToDict"
-
-describe("themeToDict", () => {
-  it("converts entry.types to dictionary", () => {
-    const input = {
-      plain: { color: "red" },
-      styles: [
-        {
-          types: ["1", "2"],
-          style: {
-            color: "green",
-          },
-        },
-        {
-          types: ["3"],
-          style: {
-            color: "blue",
-          },
-        },
-        {
-          types: ["2"],
-          style: {
-            color: "orange",
-          },
-        },
-      ],
-    }
-
-    const expected = {
-      root: {
-        color: "red",
-      },
-      plain: {
-        color: "red",
-        backgroundColor: null,
-      },
-      1: {
-        color: "green",
-      },
-      2: {
-        color: "orange",
-      },
-      3: {
-        color: "blue",
-      },
-    }
-
-    expect(themeToDict(input)).toEqual(expected)
-    // Check order in which keys were added to implicitly test merge strategy
-    expect(Object.keys(themeToDict(input, 'js'))).toEqual(Object.keys(expected))
-  })
-
-  it("limits entries by entry.languages", () => {
-    const input = {
-      plain: {},
-      styles: [
-        {
-          types: ['test'],
-          languages: ['js'],
-          style: {
-            color: "green",
-          },
-        }
-      ],
-    }
-
-    expect(themeToDict(input, 'js').test).toEqual({
-      color: 'green'
-    })
-
-    expect(themeToDict(input, 'ocaml').test).toEqual(undefined)
-  })
-})
+import themeToDict from "../themeToDict";
+
+describe("themeToDict", () => {
+  it("converts entry.types to dictionary", () => {
+    const input = {
+      plain: { color: "red" },
+      styles: [
+        {
+          types: ["1", "2"],
+          style: {
+            color: "green",
+          },
+        },
+        {
+          types: ["3"],
+          style: {
+            color: "blue",
+          },
+        },
+        {
+          types: ["2"],
+          style: {
+            color: "orange",
+          },
+        },
+      ],
+    };
+
+    const expected = {
+      root: {
+        color: "red",
+      },
+      plain: {
+        color: "red",
+        backgroundColor: null,
+      },
+      1: {
+        color: "green",
+      },
+      2: {
+        color: "orange",
+      },
+      3: {
+        color: "blue",
+      },
+    };
+
+    expect(themeToDict(input)).toEqual(expected);
+    // Check order in which keys were added to implicitly test merge strategy
+    expect(Object.keys(themeToDict(input, "js"))).toEqual(
+      Object.keys(expected)
+    );
+  });
+
+  it("limits entries by entry.languages", () => {
+    const input = {
+      plain: {},
+      styles: [
+        {
+          types: ["test"],
+          languages: ["js"],
+          style: {
+            color: "green",
+          },
+        },
+      ],
+    };
+
+    expect(themeToDict(input, "js").test).toEqual({
+      color: "green",
+    });
+
+    expect(themeToDict(input, "ocaml").test).toEqual(undefined);
+  });
+});
diff --git a/src/vendor/prism/includeLangs.js b/src/vendor/prism/includeLangs.js
index c88c748..b30066e 100755
--- a/src/vendor/prism/includeLangs.js
+++ b/src/vendor/prism/includeLangs.js
@@ -1,39 +1,39 @@
-// @flow
-
-// These are the languages that'll be included in the generated
-// prism/index.js file
-
-module.exports = {
-  markup: true,
-  bash: true,
-  clike: true,
-  c: true,
-  cpp: true,
-  css: true,
-  "css-extras": true,
-  javascript: true,
-  jsx: true,
-  "js-extras": true,
-  coffeescript: true,
-  diff: true,
-  git: true,
-  go: true,
-  graphql: true,
-  handlebars: true,
-  json: true,
-  less: true,
-  makefile: true,
-  markdown: true,
-  objectivec: true,
-  ocaml: true,
-  python: true,
-  reason: true,
-  sass: true,
-  scss: true,
-  sql: true,
-  stylus: true,
-  tsx: true,
-  typescript: true,
-  wasm: true,
-  yaml: true
-};
+// @flow
+
+// These are the languages that'll be included in the generated
+// prism/index.js file
+
+module.exports = {
+  markup: true,
+  bash: true,
+  clike: true,
+  c: true,
+  cpp: true,
+  css: true,
+  "css-extras": true,
+  javascript: true,
+  jsx: true,
+  "js-extras": true,
+  coffeescript: true,
+  diff: true,
+  git: true,
+  go: true,
+  graphql: true,
+  handlebars: true,
+  json: true,
+  less: true,
+  makefile: true,
+  markdown: true,
+  objectivec: true,
+  ocaml: true,
+  python: true,
+  reason: true,
+  sass: true,
+  scss: true,
+  sql: true,
+  stylus: true,
+  tsx: true,
+  typescript: true,
+  wasm: true,
+  yaml: true,
+};
diff --git a/src/vendor/prism/index.js b/src/vendor/prism/index.js
index f76131e..bf5b10c 100755
--- a/src/vendor/prism/index.js
+++ b/src/vendor/prism/index.js
@@ -1,82 +1,82 @@
-import Prism from './prism-core'
-import codegen from 'codegen.macro'
-
-// Babel Codegen Macro:
-// Get a list of all prismjs languages and inline them here.
-// They should only depend on "Prism" being present in the current scope.
-
-codegen`
-  const { readFileSync } = require('fs')
-  const { dirname, join } = require('path')
-  const { languages } = require('prismjs/components')
-  const prismPath = dirname(require.resolve('prismjs'))
-
-  let output = '/* This content is auto-generated to include some prismjs language components: */\\n'
-
-  const toDependencies = arr => {
-    if (typeof arr === 'string') {
-      return [arr]
-    }
-
-    return arr;
-  };
-
-  const addLanguageToOutput = language => {
-    const pathToLanguage = 'components/prism-' + language
-    const fullPath = join(prismPath, pathToLanguage + '.js')
-    const contents = readFileSync(fullPath, 'utf8')
-    const header = '\\n\\n/* "prismjs/' + pathToLanguage + '" */\\n'
-    output += header + contents
-  }
-
-  const visitedLanguages = {}
-
-  const visitLanguage = (language, langEntry) => {
-    // Mark language as visited or return if it was
-    if (visitedLanguages[language]) {
-      return
-    } else {
-      visitedLanguages[language] = true
-    }
-
-    // Required dependencies come before the actual language
-    const required = toDependencies(langEntry.require)
-
-    if (Array.isArray(required)) {
-      required.forEach(x => {
-        if (languages[x]) {
-          visitLanguage(x, languages[x])
-        } else {
-          console.warn('[prismjs/components]: Language', x, 'does not exist!')
-        }
-      })
-    }
-
-    // Add current language to output
-    addLanguageToOutput(language)
-
-    // Peer dependencies come after the actual language
-    const peerDependencies = toDependencies(langEntry.peerDependencies)
-
-    if (Array.isArray(peerDependencies)) {
-      peerDependencies.forEach(x => {
-        if (languages[x]) {
-          visitLanguage(x, languages[x])
-        } else {
-          console.warn('[prismjs/components]: Language', x, 'does not exist!')
-        }
-      })
-    }
-  };
-
-  // This json defines which languages to include
-  const includedLangs = require('./includeLangs')
-
-  Object.keys(includedLangs).forEach(language => {
-    visitLanguage(language, languages[language])
-  })
-
-  module.exports = output
-`
-
-export default Prism
+import Prism from "./prism-core";
+import codegen from "codegen.macro";
+
+// Babel Codegen Macro:
+// Get a list of all prismjs languages and inline them here.
+// They should only depend on "Prism" being present in the current scope.
+
+codegen`
+  const { readFileSync } = require('fs')
+  const { dirname, join } = require('path')
+  const { languages } = require('prismjs/components')
+  const prismPath = dirname(require.resolve('prismjs'))
+
+  let output = '/* This content is auto-generated to include some prismjs language components: */\\n'
+
+  const toDependencies = arr => {
+    if (typeof arr === 'string') {
+      return [arr]
+    }
+
+    return arr;
+  };
+
+  const addLanguageToOutput = language => {
+    const pathToLanguage = 'components/prism-' + language
+    const fullPath = join(prismPath, pathToLanguage + '.js')
+    const contents = readFileSync(fullPath, 'utf8')
+    const header = '\\n\\n/* "prismjs/' + pathToLanguage + '" */\\n'
+    output += header + contents
+  }
+
+  const visitedLanguages = {}
+
+  const visitLanguage = (language, langEntry) => {
+    // Mark language as visited or return if it was
+    if (visitedLanguages[language]) {
+      return
+    } else {
+      visitedLanguages[language] = true
+    }
+
+    // Required dependencies come before the actual language
+    const required = toDependencies(langEntry.require)
+
+    if (Array.isArray(required)) {
+      required.forEach(x => {
+        if (languages[x]) {
+          visitLanguage(x, languages[x])
+        } else {
+          console.warn('[prismjs/components]: Language', x, 'does not exist!')
+        }
+      })
+    }
+
+    // Add current language to output
+    addLanguageToOutput(language)
+
+    // Peer dependencies come after the actual language
+    const peerDependencies = toDependencies(langEntry.peerDependencies)
+
+    if (Array.isArray(peerDependencies)) {
+      peerDependencies.forEach(x => {
+        if (languages[x]) {
+          visitLanguage(x, languages[x])
+        } else {
+          console.warn('[prismjs/components]: Language', x, 'does not exist!')
+        }
+      })
+    }
+  };
+
+  // This json defines which languages to include
+  const includedLangs = require('./includeLangs')
+
+  Object.keys(includedLangs).forEach(language => {
+    visitLanguage(language, languages[language])
+  })
+
+  module.exports = output
+`;
+
+export default Prism;
diff --git a/src/vendor/prism/prism-core.js b/src/vendor/prism/prism-core.js
index 1fbea36..61189a4 100755
--- a/src/vendor/prism/prism-core.js
+++ b/src/vendor/prism/prism-core.js
@@ -1,429 +1,429 @@
-/**
- * Prism: Lightweight, robust, elegant syntax highlighting
- * MIT license http://www.opensource.org/licenses/mit-license.php/
- * @author Lea Verou http://lea.verou.me
- */
-
-/**
- * prism-react-renderer:
- * This file has been modified to remove:
- * - globals and window dependency
- * - worker support
- * - highlightAll and other element dependent methods
- * - _.hooks helpers
- * - UMD/node-specific hacks
- * It has also been run through prettier
- */
-
-var Prism = (function() {
-  // Private helper vars
-  var lang = /\blang(?:uage)?-([\w-]+)\b/i;
-  var uniqueId = 0;
-
-  var _ = {
-    util: {
-      encode: function(tokens) {
-        if (tokens instanceof Token) {
-          return new Token(
-            tokens.type,
-            _.util.encode(tokens.content),
-            tokens.alias
-          );
-        } else if (_.util.type(tokens) === "Array") {
-          return tokens.map(_.util.encode);
-        } else {
-          return tokens
-            .replace(/&/g, "&amp;")
-            .replace(/</g, "&lt;")
-            .replace(/\u00a0/g, " ");
-        }
-      },
-
-      type: function(o) {
-        return Object.prototype.toString.call(o).slice(8, -1);
-      },
-
-      objId: function(obj) {
-        if (!obj["__id"]) {
-          Object.defineProperty(obj, "__id", { value: ++uniqueId });
-        }
-        return obj["__id"];
-      },
-
-      // Deep clone a language definition (e.g. to extend it)
-      clone: function deepClone(o, visited) {
-        var type = _.util.type(o);
-        visited = visited || {};
-
-        switch (type) {
-          case "Object":
-            if (visited[_.util.objId(o)]) {
-              return visited[_.util.objId(o)];
-            }
-            var clone = {};
-            visited[_.util.objId(o)] = clone;
-
-            for (var key in o) {
-              if (o.hasOwnProperty(key)) {
-                clone[key] = deepClone(o[key], visited);
-              }
-            }
-
-            return clone;
-
-          case "Array":
-            if (visited[_.util.objId(o)]) {
-              return visited[_.util.objId(o)];
-            }
-            var clone = [];
-            visited[_.util.objId(o)] = clone;
-
-            o.forEach(function(v, i) {
-              clone[i] = deepClone(v, visited);
-            });
-
-            return clone;
-        }
-
-        return o;
-      }
-    },
-
-    languages: {
-      extend: function(id, redef) {
-        var lang = _.util.clone(_.languages[id]);
-
-        for (var key in redef) {
-          lang[key] = redef[key];
-        }
-
-        return lang;
-      },
-
-      insertBefore: function(inside, before, insert, root) {
-        root = root || _.languages;
-        var grammar = root[inside];
-        var ret = {};
-
-        for (var token in grammar) {
-          if (grammar.hasOwnProperty(token)) {
-            if (token == before) {
-              for (var newToken in insert) {
-                if (insert.hasOwnProperty(newToken)) {
-                  ret[newToken] = insert[newToken];
-                }
-              }
-            }
-
-            // Do not insert tokens which also occur in insert. See #1525
-            if (!insert.hasOwnProperty(token)) {
-              ret[token] = grammar[token];
-            }
-          }
-        }
-
-        var old = root[inside];
-        root[inside] = ret;
-
-        // Update references in other language definitions
-        _.languages.DFS(_.languages, function(key, value) {
-          if (value === old && key != inside) {
-            this[key] = ret;
-          }
-        });
-
-        return ret;
-      },
-
-      // Traverse a language definition with Depth First Search
-      DFS: function DFS(o, callback, type, visited) {
-        visited = visited || {};
-
-        var objId = _.util.objId;
-
-        for (var i in o) {
-          if (o.hasOwnProperty(i)) {
-            callback.call(o, i, o[i], type || i);
-
-            var property = o[i],
-              propertyType = _.util.type(property);
-
-            if (propertyType === "Object" && !visited[objId(property)]) {
-              visited[objId(property)] = true;
-              DFS(property, callback, null, visited);
-            } else if (propertyType === "Array" && !visited[objId(property)]) {
-              visited[objId(property)] = true;
-              DFS(property, callback, i, visited);
-            }
-          }
-        }
-      }
-    },
-
-    plugins: {},
-
-    highlight: function(text, grammar, language) {
-      var env = {
-        code: text,
-        grammar: grammar,
-        language: language
-      };
-      env.tokens = _.tokenize(env.code, env.grammar);
-      return Token.stringify(_.util.encode(env.tokens), env.language);
-    },
-
-    matchGrammar: function(
-      text,
-      strarr,
-      grammar,
-      index,
-      startPos,
-      oneshot,
-      target
-    ) {
-      for (var token in grammar) {
-        if (!grammar.hasOwnProperty(token) || !grammar[token]) {
-          continue;
-        }
-
-        if (token == target) {
-          return;
-        }
-
-        var patterns = grammar[token];
-        patterns = _.util.type(patterns) === "Array" ? patterns : [patterns];
-
-        for (var j = 0; j < patterns.length; ++j) {
-          var pattern = patterns[j],
-            inside = pattern.inside,
-            lookbehind = !!pattern.lookbehind,
-            greedy = !!pattern.greedy,
-            lookbehindLength = 0,
-            alias = pattern.alias;
-
-          if (greedy && !pattern.pattern.global) {
-            // Without the global flag, lastIndex won't work
-            var flags = pattern.pattern.toString().match(/[imuy]*$/)[0];
-            pattern.pattern = RegExp(pattern.pattern.source, flags + "g");
-          }
-
-          pattern = pattern.pattern || pattern;
-
-          // Don't cache length as it changes during the loop
-          for (
-            var i = index, pos = startPos;
-            i < strarr.length;
-            pos += strarr[i].length, ++i
-          ) {
-            var str = strarr[i];
-
-            if (strarr.length > text.length) {
-              // Something went terribly wrong, ABORT, ABORT!
-              return;
-            }
-
-            if (str instanceof Token) {
-              continue;
-            }
-
-            if (greedy && i != strarr.length - 1) {
-              pattern.lastIndex = pos;
-              var match = pattern.exec(text);
-              if (!match) {
-                break;
-              }
-
-              var from = match.index + (lookbehind ? match[1].length : 0),
-                to = match.index + match[0].length,
-                k = i,
-                p = pos;
-
-              for (
-                var len = strarr.length;
-                k < len &&
-                (p < to || (!strarr[k].type && !strarr[k - 1].greedy));
-                ++k
-              ) {
-                p += strarr[k].length;
-                // Move the index i to the element in strarr that is closest to from
-                if (from >= p) {
-                  ++i;
-                  pos = p;
-                }
-              }
-
-              // If strarr[i] is a Token, then the match starts inside another Token, which is invalid
-              if (strarr[i] instanceof Token) {
-                continue;
-              }
-
-              // Number of tokens to delete and replace with the new match
-              delNum = k - i;
-              str = text.slice(pos, p);
-              match.index -= pos;
-            } else {
-              pattern.lastIndex = 0;
-
-              var match = pattern.exec(str),
-                delNum = 1;
-            }
-
-            if (!match) {
-              if (oneshot) {
-                break;
-              }
-
-              continue;
-            }
-
-            if (lookbehind) {
-              lookbehindLength = match[1] ? match[1].length : 0;
-            }
-
-            var from = match.index + lookbehindLength,
-              match = match[0].slice(lookbehindLength),
-              to = from + match.length,
-              before = str.slice(0, from),
-              after = str.slice(to);
-
-            var args = [i, delNum];
-
-            if (before) {
-              ++i;
-              pos += before.length;
-              args.push(before);
-            }
-
-            var wrapped = new Token(
-              token,
-              inside ? _.tokenize(match, inside) : match,
-              alias,
-              match,
-              greedy
-            );
-
-            args.push(wrapped);
-
-            if (after) {
-              args.push(after);
-            }
-
-            Array.prototype.splice.apply(strarr, args);
-
-            if (delNum != 1)
-              _.matchGrammar(text, strarr, grammar, i, pos, true, token);
-
-            if (oneshot) break;
-          }
-        }
-      }
-    },
-
-    hooks: {
-      add: function() {}
-    },
-
-    tokenize: function(text, grammar, language) {
-      var strarr = [text];
-
-      var rest = grammar.rest;
-
-      if (rest) {
-        for (var token in rest) {
-          grammar[token] = rest[token];
-        }
-
-        delete grammar.rest;
-      }
-
-      _.matchGrammar(text, strarr, grammar, 0, 0, false);
-
-      return strarr;
-    }
-  };
-
-  var Token = (_.Token = function(type, content, alias, matchedStr, greedy) {
-    this.type = type;
-    this.content = content;
-    this.alias = alias;
-    // Copy of the full string this token was created from
-    this.length = (matchedStr || "").length | 0;
-    this.greedy = !!greedy;
-  });
-
-  Token.stringify = function(o, language, parent) {
-    if (typeof o == "string") {
-      return o;
-    }
-
-    if (_.util.type(o) === "Array") {
-      return o
-        .map(function(element) {
-          return Token.stringify(element, language, o);
-        })
-        .join("");
-    }
-
-    var env = {
-      type: o.type,
-      content: Token.stringify(o.content, language, parent),
-      tag: "span",
-      classes: ["token", o.type],
-      attributes: {},
-      language: language,
-      parent: parent
-    };
-
-    if (o.alias) {
-      var aliases = _.util.type(o.alias) === "Array" ? o.alias : [o.alias];
-      Array.prototype.push.apply(env.classes, aliases);
-    }
-
-    var attributes = Object.keys(env.attributes)
-      .map(function(name) {
-        return (
-          name +
-          '="' +
-          (env.attributes[name] || "").replace(/"/g, "&quot;") +
-          '"'
-        );
-      })
-      .join(" ");
-
-    return (
-      "<" +
-      env.tag +
-      ' class="' +
-      env.classes.join(" ") +
-      '"' +
-      (attributes ? " " + attributes : "") +
-      ">" +
-      env.content +
-      "</" +
-      env.tag +
-      ">"
-    );
-  };
-
-  return _;
-})();
-
-export default Prism;
+/**
+ * Prism: Lightweight, robust, elegant syntax highlighting
+ * MIT license http://www.opensource.org/licenses/mit-license.php/
+ * @author Lea Verou http://lea.verou.me
+ */
+
+/**
+ * prism-react-renderer:
+ * This file has been modified to remove:
+ * - globals and window dependency
+ * - worker support
+ * - highlightAll and other element dependent methods
+ * - _.hooks helpers
+ * - UMD/node-specific hacks
+ * It has also been run through prettier
+ */
+
+var Prism = (function () {
+  // Private helper vars
+  var lang = /\blang(?:uage)?-([\w-]+)\b/i;
+  var uniqueId = 0;
+
+  var _ = {
+    util: {
+      encode: function (tokens) {
+        if (tokens instanceof Token) {
+          return new Token(
+            tokens.type,
+            _.util.encode(tokens.content),
+            tokens.alias
+          );
+        } else if (_.util.type(tokens) === "Array") {
+          return tokens.map(_.util.encode);
+        } else {
+          return tokens
+            .replace(/&/g, "&amp;")
+            .replace(/</g, "&lt;")
+            .replace(/\u00a0/g, " ");
+        }
+      },
+
+      type: function (o) {
+        return Object.prototype.toString.call(o).slice(8, -1);
+      },
+
+      objId: function (obj) {
+        if (!obj["__id"]) {
+          Object.defineProperty(obj, "__id", { value: ++uniqueId });
+        }
+        return obj["__id"];
+      },
+
+      // Deep clone a language definition (e.g. to extend it)
+      clone: function deepClone(o, visited) {
+        var type = _.util.type(o);
+        visited = visited || {};
+
+        switch (type) {
+          case "Object":
+            if (visited[_.util.objId(o)]) {
+              return visited[_.util.objId(o)];
+            }
+            var clone = {};
+            visited[_.util.objId(o)] = clone;
+
+            for (var key in o) {
+              if (o.hasOwnProperty(key)) {
+                clone[key] = deepClone(o[key], visited);
+              }
+            }
+
+            return clone;
+
+          case "Array":
+            if (visited[_.util.objId(o)]) {
+              return visited[_.util.objId(o)];
+            }
+            var clone = [];
+            visited[_.util.objId(o)] = clone;
+
+            o.forEach(function (v, i) {
+              clone[i] = deepClone(v, visited);
+            });
+
+            return clone;
+        }
+
+        return o;
+      },
+    },
+
+    languages: {
+      extend: function (id, redef) {
+        var lang = _.util.clone(_.languages[id]);
+
+        for (var key in redef) {
+          lang[key] = redef[key];
+        }
+
+        return lang;
+      },
+
+      insertBefore: function (inside, before, insert, root) {
+        root = root || _.languages;
+        var grammar = root[inside];
+        var ret = {};
+
+        for (var token in grammar) {
+          if (grammar.hasOwnProperty(token)) {
+            if (token == before) {
+              for (var newToken in insert) {
+                if (insert.hasOwnProperty(newToken)) {
+                  ret[newToken] = insert[newToken];
+                }
+              }
+            }
+
+            // Do not insert tokens which also occur in insert. See #1525
+            if (!insert.hasOwnProperty(token)) {
+              ret[token] = grammar[token];
+            }
+          }
+        }
+
+        var old = root[inside];
+        root[inside] = ret;
+
+        // Update references in other language definitions
+        _.languages.DFS(_.languages, function (key, value) {
+          if (value === old && key != inside) {
+            this[key] = ret;
+          }
+        });
+
+        return ret;
+      },
+
+      // Traverse a language definition with Depth First Search
+      DFS: function DFS(o, callback, type, visited) {
+        visited = visited || {};
+
+        var objId = _.util.objId;
+
+        for (var i in o) {
+          if (o.hasOwnProperty(i)) {
+            callback.call(o, i, o[i], type || i);
+
+            var property = o[i],
+              propertyType = _.util.type(property);
+
+            if (propertyType === "Object" && !visited[objId(property)]) {
+              visited[objId(property)] = true;
+              DFS(property, callback, null, visited);
+            } else if (propertyType === "Array" && !visited[objId(property)]) {
+              visited[objId(property)] = true;
+              DFS(property, callback, i, visited);
+            }
+          }
+        }
+      },
+    },
+
+    plugins: {},
+
+    highlight: function (text, grammar, language) {
+      var env = {
+        code: text,
+        grammar: grammar,
+        language: language,
+      };
+      env.tokens = _.tokenize(env.code, env.grammar);
+      return Token.stringify(_.util.encode(env.tokens), env.language);
+    },
+
+    matchGrammar: function (
+      text,
+      strarr,
+      grammar,
+      index,
+      startPos,
+      oneshot,
+      target
+    ) {
+      for (var token in grammar) {
+        if (!grammar.hasOwnProperty(token) || !grammar[token]) {
+          continue;
+        }
+
+        if (token == target) {
+          return;
+        }
+
+        var patterns = grammar[token];
+        patterns = _.util.type(patterns) === "Array" ? patterns : [patterns];
+
+        for (var j = 0; j < patterns.length; ++j) {
+          var pattern = patterns[j],
+            inside = pattern.inside,
+            lookbehind = !!pattern.lookbehind,
+            greedy = !!pattern.greedy,
+            lookbehindLength = 0,
+            alias = pattern.alias;
+
+          if (greedy && !pattern.pattern.global) {
+            // Without the global flag, lastIndex won't work
+            var flags = pattern.pattern.toString().match(/[imuy]*$/)[0];
+            pattern.pattern = RegExp(pattern.pattern.source, flags + "g");
+          }
+
+          pattern = pattern.pattern || pattern;
+
+          // Don't cache length as it changes during the loop
+          for (
+            var i = index, pos = startPos;
+            i < strarr.length;
+            pos += strarr[i].length, ++i
+          ) {
+            var str = strarr[i];
+
+            if (strarr.length > text.length) {
+              // Something went terribly wrong, ABORT, ABORT!
+              return;
+            }
+
+            if (str instanceof Token) {
+              continue;
+            }
+
+            if (greedy && i != strarr.length - 1) {
+              pattern.lastIndex = pos;
+              var match = pattern.exec(text);
+              if (!match) {
+                break;
+              }
+
+              var from = match.index + (lookbehind ? match[1].length : 0),
+                to = match.index + match[0].length,
+                k = i,
+                p = pos;
+
+              for (
+                var len = strarr.length;
+                k < len &&
+                (p < to || (!strarr[k].type && !strarr[k - 1].greedy));
+                ++k
+              ) {
+                p += strarr[k].length;
+                // Move the index i to the element in strarr that is closest to from
+                if (from >= p) {
+                  ++i;
+                  pos = p;
+                }
+              }
+
+              // If strarr[i] is a Token, then the match starts inside another Token, which is invalid
+              if (strarr[i] instanceof Token) {
+                continue;
+              }
+
+              // Number of tokens to delete and replace with the new match
+              delNum = k - i;
+              str = text.slice(pos, p);
+              match.index -= pos;
+            } else {
+              pattern.lastIndex = 0;
+
+              var match = pattern.exec(str),
+                delNum = 1;
+            }
+
+            if (!match) {
+              if (oneshot) {
+                break;
+              }
+
+              continue;
+            }
+
+            if (lookbehind) {
+              lookbehindLength = match[1] ? match[1].length : 0;
+            }
+
+            var from = match.index + lookbehindLength,
+              match = match[0].slice(lookbehindLength),
+              to = from + match.length,
+              before = str.slice(0, from),
+              after = str.slice(to);
+
+            var args = [i, delNum];
+
+            if (before) {
+              ++i;
+              pos += before.length;
+              args.push(before);
+            }
+
+            var wrapped = new Token(
+              token,
+              inside ? _.tokenize(match, inside) : match,
+              alias,
+              match,
+              greedy
+            );
+
+            args.push(wrapped);
+
+            if (after) {
+              args.push(after);
+            }
+
+            Array.prototype.splice.apply(strarr, args);
+
+            if (delNum != 1)
+              _.matchGrammar(text, strarr, grammar, i, pos, true, token);
+
+            if (oneshot) break;
+          }
+        }
+      }
+    },
+
+    hooks: {
+      add: function () {},
+    },
+
+    tokenize: function (text, grammar, language) {
+      var strarr = [text];
+
+      var rest = grammar.rest;
+
+      if (rest) {
+        for (var token in rest) {
+          grammar[token] = rest[token];
+        }
+
+        delete grammar.rest;
+      }
+
+      _.matchGrammar(text, strarr, grammar, 0, 0, false);
+
+      return strarr;
+    },
+  };
+
+  var Token = (_.Token = function (type, content, alias, matchedStr, greedy) {
+    this.type = type;
+    this.content = content;
+    this.alias = alias;
+    // Copy of the full string this token was created from
+    this.length = (matchedStr || "").length | 0;
+    this.greedy = !!greedy;
+  });
+
+  Token.stringify = function (o, language, parent) {
+    if (typeof o == "string") {
+      return o;
+    }
+
+    if (_.util.type(o) === "Array") {
+      return o
+        .map(function (element) {
+          return Token.stringify(element, language, o);
+        })
+        .join("");
+    }
+
+    var env = {
+      type: o.type,
+      content: Token.stringify(o.content, language, parent),
+      tag: "span",
+      classes: ["token", o.type],
+      attributes: {},
+      language: language,
+      parent: parent,
+    };
+
+    if (o.alias) {
+      var aliases = _.util.type(o.alias) === "Array" ? o.alias : [o.alias];
+      Array.prototype.push.apply(env.classes, aliases);
+    }
+
+    var attributes = Object.keys(env.attributes)
+      .map(function (name) {
+        return (
+          name +
+          '="' +
+          (env.attributes[name] || "").replace(/"/g, "&quot;") +
+          '"'
+        );
+      })
+      .join(" ");
+
+    return (
+      "<" +
+      env.tag +
+      ' class="' +
+      env.classes.join(" ") +
+      '"' +
+      (attributes ? " " + attributes : "") +
+      ">" +
+      env.content +
+      "</" +
+      env.tag +
+      ">"
+    );
+  };
+
+  return _;
+})();
+
+export default Prism;