-
-
Notifications
You must be signed in to change notification settings - Fork 62
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
types: add type definitions for micromark
- Loading branch information
1 parent
005ad17
commit 8f64181
Showing
27 changed files
with
363 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
// TypeScript Version: 3.0 | ||
|
||
import buffer = require('./lib') | ||
|
||
export = buffer |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
// TypeScript Version: 3.0 | ||
|
||
import buffer = require('./buffer') | ||
|
||
export = buffer |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
declare function compileHTML(): void | ||
|
||
export = compileHTML |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
import {ParserOptions} from './parse' | ||
|
||
declare function buffer(options?: ParserOptions): void | ||
|
||
export = buffer |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,11 @@ | ||
import {Parser} from '../shared-types' | ||
|
||
declare namespace createParser { | ||
interface ParserOptions { | ||
extensions: unknown[] | ||
} | ||
} | ||
|
||
declare function createParser(options?: createParser.ParserOptions): Parser | ||
|
||
export = createParser |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
declare function postprocess(): void | ||
|
||
export = postprocess |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
declare function preprocess(): void | ||
|
||
export = preprocess |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
import {ParserOptions} from './parse' | ||
|
||
declare function stream(options?: ParserOptions): void | ||
|
||
export = stream |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
declare function classifyCharacter(code: number): number | ||
|
||
export = classifyCharacter |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,9 @@ | ||
import {Parser, Point, Tokenizer} from '../../shared-types' | ||
|
||
declare function createTokenizer( | ||
parser: Parser, | ||
initialize: unknown, | ||
from: Point | ||
): Tokenizer | ||
|
||
export = createTokenizer |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,15 @@ | ||
/** | ||
* @param array array to flatten | ||
* @param map mapping function | ||
* @param a passed to map function | ||
* @typeParam T shape of item input to flatMap | ||
* @typeParam U shape of item returned by flatMap | ||
* @typeParam A shape of additional attribute passed | ||
*/ | ||
declare function flatMap<T, U, A>( | ||
array: T[][], | ||
map: (array: T[], a: A) => U[], | ||
a?: A | ||
): U[] | ||
|
||
export = flatMap |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
declare function lowercase(code: number): number | ||
|
||
export = lowercase |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
import {Point} from 'shared-types' | ||
|
||
declare function movePoint(point: Point, offset: number): Point | ||
|
||
export = movePoint |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
declare function normalizeIdentifier(value: string): string | ||
|
||
export = normalizeIdentifier |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
declare function normalizeUri(url: string): string | ||
|
||
export = normalizeUri |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
import {Event, Type} from '../../shared-types' | ||
|
||
declare function prefixSize(events: Event[], type: Type): number | ||
|
||
export = prefixSize |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
declare function safeFromInt(value: string, base?: number): string | ||
|
||
export = safeFromInt |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
declare function serializeChunks(chunks: Array<string | number>): string | ||
|
||
export = serializeChunks |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
declare function shallow<T>(object: T): T | ||
|
||
export = shallow |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
import {Token} from '../../shared-types' | ||
|
||
declare function sliceChunks(chunks: string[], token: Token): string[] | ||
|
||
export = sliceChunks |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
import {Event} from '../../shared-types' | ||
|
||
declare function subtokenize(events: Event[]): {done: boolean; events: Event[]} | ||
|
||
export = subtokenize |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
import micromarkBuffer = require('micromark') | ||
import micromarkStream = require('micromark/stream') | ||
|
||
micromarkBuffer() | ||
micromarkStream() |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,220 @@ | ||
// TypeScript Version: 3.0 | ||
|
||
/** | ||
* A location in a string or buffer | ||
*/ | ||
export interface Point { | ||
line: number | ||
column: number | ||
offset: number | ||
_index?: number | ||
_bufferIndex?: number | ||
} | ||
|
||
/** | ||
* A token type | ||
* | ||
* TODO: enumerate token types | ||
*/ | ||
export type Type = string | ||
|
||
/** | ||
* | ||
*/ | ||
export interface Token { | ||
type: Type | ||
start: Point | ||
end: Point | ||
|
||
previous: Token | ||
next: Token | ||
|
||
/** | ||
* Declares a token as having content of a certain type. | ||
* Because markdown first requires parsing containers, flow, and content completely, | ||
* and then later go on to phrasing and such, it needs to be declared somewhere on the tokens. | ||
*/ | ||
contentType: 'flow' | 'content' | 'string' | 'text' | ||
|
||
// TODO move these to interfaces extending Token, or move these to a `data` property similar to unist (would require code refactor) | ||
/** | ||
* Used for whitespace in several places which needs to account for tab stops | ||
*/ | ||
_size?: number | ||
|
||
/** | ||
* ends with a CR, LF, or CRLF. | ||
*/ | ||
_break?: boolean | ||
|
||
/** | ||
* Used when dealing with linked tokens. A child tokenizer is needed to tokenize them, which is stored on those tokens | ||
*/ | ||
_tokenizer?: Tokenizer | ||
|
||
/** | ||
* Used for attention (emphasis, strong). | ||
* | ||
* could be (enter emphasis, enter emphasisMarker, exit emphasisMarker, enter strong, enter strongMarker, exit strongMarker, enter data, exit data) | ||
*/ | ||
_events?: Event[] | ||
|
||
/** | ||
* This is used for tokens that are already “subtokenized”. | ||
* | ||
* E.g., when parsing flow, there are content tokens, but those are directly tokenized into definitions/setext/paragraphs | ||
*/ | ||
_subevents?: Event[] | ||
|
||
/** | ||
* Set to true to mark that a token (e.g., with subevents) is already handled | ||
*/ | ||
_contentTokenized?: boolean | ||
|
||
/** | ||
* close and open are also used in attention: | ||
* depending on the characters before and after sequences (**), | ||
* the sequence can open, close, both, or none | ||
*/ | ||
_open?: boolean | ||
|
||
/** | ||
* close and open are also used in attention: | ||
* depending on the characters before and after sequences (**), | ||
* the sequence can open, close, both, or none | ||
*/ | ||
_close?: boolean | ||
_marker?: number | ||
_side?: number | ||
|
||
/** | ||
* Generally, tabs and spaces behave the same, but in the case of a hard break through trailing spaces ( \n), tabs do not work | ||
*/ | ||
_tabs?: boolean | ||
} | ||
|
||
/** | ||
* | ||
*/ | ||
export type Event = [string, Token, unknown] | ||
|
||
/** | ||
* These are transitions to update the CommonMark State Machine (CSMS) | ||
*/ | ||
export interface Effects { | ||
/** | ||
* Enter and exit define where tokens start and end | ||
*/ | ||
enter: (type: Type) => void | ||
|
||
/** | ||
* Enter and exit define where tokens start and end | ||
*/ | ||
exit: (type: Type) => void | ||
|
||
/** | ||
* Consume deals with a character, and moves to the next | ||
*/ | ||
consume: (code: number) => void | ||
|
||
/** | ||
* Attempt deals with several values, and tries to parse according to those values. | ||
* If a value resulted in `ok`, it worked, the tokens that were made are used, | ||
* and `returnState` is switched to. | ||
* If the result is `nok`, the attempt failed, | ||
* so we revert to the original state, and `bogusState` is used. | ||
*/ | ||
attempt: ( | ||
notSureWhatThisIs: | ||
| Construct | ||
| Construct[] | ||
| {[code: number]: Construct | Construct[]}, | ||
returnState: unknown, | ||
bogusState?: unknown | ||
) => (code: number) => void | ||
|
||
/** | ||
* interrupt is used for stuff right after a line of content. | ||
*/ | ||
interrupt: ( | ||
notSureWhatThisIs: | ||
| Construct | ||
| Construct[] | ||
| {[code: number]: Construct | Construct[]}, | ||
ok: Okay, | ||
nok?: NotOkay | ||
) => (code: number) => void | ||
|
||
check: ( | ||
notSureWhatThisIs: | ||
| Construct | ||
| Construct[] | ||
| {[code: number]: Construct | Construct[]}, | ||
ok: Okay, | ||
nok?: NotOkay | ||
) => (code: number) => void | ||
|
||
/** | ||
* lazy is used for lines that were not properly preceded by the container. | ||
*/ | ||
lazy: (notSureWhatThisIs: | ||
| Construct | ||
| Construct[] | ||
| {[code: number]: Construct | Construct[]}, | ||
ok: Okay, | ||
nok?: NotOkay) => void | ||
} | ||
|
||
/** | ||
* | ||
*/ | ||
export type Okay = (code: number) => () => void | ||
|
||
/** | ||
* | ||
*/ | ||
export type NotOkay = Okay | ||
|
||
/** | ||
* | ||
*/ | ||
export interface Tokenizer { | ||
previous: Token | ||
events: Event[] | ||
parser: Parser | ||
sliceStream: (token: Token) => string[] | ||
sliceSerialize: (token: Token) => string | ||
now: () => Point | ||
defineSkip: (value: Point) => void | ||
write: (value: number) => Event[] | ||
} | ||
|
||
export type Construct = unknown | ||
|
||
/** | ||
* | ||
*/ | ||
export interface Parser { | ||
hooks: { | ||
[key: string]: Construct | Construct[] | ||
} | ||
flow: (something: unknown) => unknown | ||
defined: unknown[] | ||
} | ||
|
||
/** | ||
* | ||
*/ | ||
export interface TokenizerThis { | ||
events: Event[] | ||
interrupt: unknown | ||
lazy: unknown | ||
containerState: { | ||
marker: number | ||
type: Type | ||
initialBlankLine: unknown | ||
size: number | ||
_closeFlow: unknown | ||
furtherBlankLines: unknown | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
// TypeScript Version: 3.0 | ||
|
||
import stream = require('./lib/stream') | ||
|
||
export = stream |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,18 @@ | ||
{ | ||
"compilerOptions": { | ||
"moduleResolution": "node", | ||
"lib": [ | ||
"ES5" | ||
], | ||
"strict": true, | ||
"baseUrl": ".", | ||
"paths": { | ||
"micromark": [ | ||
"index.d.ts" | ||
], | ||
"micromark/stream": [ | ||
"stream.d.ts" | ||
] | ||
} | ||
} | ||
} |
Oops, something went wrong.