Fix typos #443

Merged (2 commits, Oct 18, 2021)
examples/c_files/memmgr.h (2 changes: 1 addition & 1 deletion)
@@ -49,7 +49,7 @@
// minimize pool fragmentation in case of multiple allocations
// and deallocations, it is advisable to not allocate
// blocks that are too small.
-// This flag sets the minimal ammount of quantas for
+// This flag sets the minimal amount of quantas for
// an allocation. If the size of a ulong is 4 and you
// set this flag to 16, the minimal size of an allocation
// will be 4 * 2 * 16 = 128 bytes
examples/c_files/year.c (4 changes: 2 additions & 2 deletions)
@@ -7,8 +7,8 @@ void convert(int thousands, int hundreds, int tens, int ones)
char *num[] = {"", "One", "Two", "Three", "Four", "Five", "Six",
"Seven", "Eight", "Nine"};

char *for_ten[] = {"", "", "Twenty", "Thirty", "Fourty", "Fifty", "Sixty",
"Seventy", "Eighty", "Ninty"};
char *for_ten[] = {"", "", "Twenty", "Thirty", "Forty", "Fifty", "Sixty",
"Seventy", "Eighty", "Ninety"};

char *af_ten[] = {"Ten", "Eleven", "Twelve", "Thirteen", "Fourteen",
"Fifteen", "Sixteen", "Seventeen", "Eighteen", "Ninteen"};
pycparser/c_generator.py (12 changes: 6 additions & 6 deletions)
@@ -97,23 +97,23 @@ def visit_BinaryOp(self, n):
#
# If `n.left.op` has a stronger or equally binding precedence in
# comparison to `n.op`, no parenthesis are needed for the left:
-# e.g., `(a*b) + c` is equivelent to `a*b + c`, as well as
-# `(a+b) - c` is equivelent to `a+b - c` (same precedence).
+# e.g., `(a*b) + c` is equivalent to `a*b + c`, as well as
+# `(a+b) - c` is equivalent to `a+b - c` (same precedence).
# If the left operator is weaker binding than the current, then
# parentheses are necessary:
-# e.g., `(a+b) * c` is NOT equivelent to `a+b * c`.
+# e.g., `(a+b) * c` is NOT equivalent to `a+b * c`.
lval_str = self._parenthesize_if(
n.left,
lambda d: not (self._is_simple_node(d) or
self.reduce_parentheses and isinstance(d, c_ast.BinaryOp) and
self.precedence_map[d.op] >= self.precedence_map[n.op]))
# If `n.right.op` has a stronger -but not equal- binding precedence,
# parenthesis can be omitted on the right:
-# e.g., `a + (b*c)` is equivelent to `a + b*c`.
+# e.g., `a + (b*c)` is equivalent to `a + b*c`.
# If the right operator is weaker or equally binding, then parentheses
# are necessary:
-# e.g., `a * (b+c)` is NOT equivelent to `a * b+c` and
-# `a - (b+c)` is NOT equivelent to `a - b+c` (same precedence).
+# e.g., `a * (b+c)` is NOT equivalent to `a * b+c` and
+# `a - (b+c)` is NOT equivalent to `a - b+c` (same precedence).
rval_str = self._parenthesize_if(
n.right,
lambda d: not (self._is_simple_node(d) or
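As context for the precedence comments above (this is not part of the PR's changes): the behavior they describe is driven by the generator's reduce_parentheses option. A minimal sketch of what that looks like from the caller's side, assuming the flag is exposed as a CGenerator constructor argument (the `self.reduce_parentheses` reference in the diff suggests it is); the C snippet and the expected output strings are illustrative, not verified here:

    from pycparser import c_parser, c_generator

    src = 'int f(int a, int b, int c) { return (a * b) + c; }'
    ast = c_parser.CParser().parse(src)

    # Default behaviour parenthesizes every nested BinaryOp operand,
    # so the body should come back roughly as "return (a * b) + c;".
    print(c_generator.CGenerator().visit(ast))

    # With precedence-based reduction, `*` binds tighter than `+`, so the
    # left operand needs no parentheses: roughly "return a * b + c;".
    print(c_generator.CGenerator(reduce_parentheses=True).visit(ast))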
4 changes: 2 additions & 2 deletions pycparser/c_lexer.py
Expand Up @@ -171,7 +171,7 @@ def _make_tok_location(self, token):
# Conditional operator (?)
'CONDOP',

-# Delimeters
+# Delimiters
'LPAREN', 'RPAREN', # ( )
'LBRACKET', 'RBRACKET', # [ ]
'LBRACE', 'RBRACE', # { }
@@ -415,7 +415,7 @@ def t_NEWLINE(self, t):
# ?
t_CONDOP = r'\?'

-# Delimeters
+# Delimiters
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_LBRACKET = r'\['
pycparser/c_parser.py (4 changes: 2 additions & 2 deletions)
@@ -531,7 +531,7 @@ def p_translation_unit_2(self, p):
# Declarations always come as lists (because they can be
# several in one line), so we wrap the function definition
# into a list as well, to make the return value of
-# external_declaration homogenous.
+# external_declaration homogeneous.
def p_external_declaration_1(self, p):
""" external_declaration : function_definition
"""
@@ -788,7 +788,7 @@ def p_declaration_specifiers_no_type_3(self, p):
"""
p[0] = self._add_declaration_specifier(p[2], p[1], 'function')

-# Withot this, `typedef _Atomic(T) U` will parse incorrectly because the
+# Without this, `typedef _Atomic(T) U` will parse incorrectly because the
# _Atomic qualifier will match, instead of the specifier.
def p_declaration_specifiers_no_type_4(self, p):
""" declaration_specifiers_no_type : atomic_specifier declaration_specifiers_no_type_opt
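As context for the comment above (not part of the PR's changes): the `typedef _Atomic(T) U` case can be exercised directly, assuming a pycparser version that already includes this atomic_specifier rule; `atomic_int` is just an illustrative name:

    from pycparser import c_parser

    src = 'typedef _Atomic(int) atomic_int;'
    ast = c_parser.CParser().parse(src)

    # With the extra declaration_specifiers_no_type rule, _Atomic(int) is
    # matched as an atomic type specifier rather than a bare qualifier, so
    # the dump should show a typedef of an atomic-qualified int.
    ast.show()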
4 changes: 2 additions & 2 deletions pycparser/ply/ctokens.py
Expand Up @@ -30,7 +30,7 @@
# Ternary operator (?)
'TERNARY',

-# Delimeters ( ) [ ] { } , . ; :
+# Delimiters ( ) [ ] { } , . ; :
'LPAREN', 'RPAREN',
'LBRACKET', 'RBRACKET',
'LBRACE', 'RBRACE',
@@ -86,7 +86,7 @@
# ?
t_TERNARY = r'\?'

-# Delimeters
+# Delimiters
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_LBRACKET = r'\['
pycparser/ply/yacc.py (4 changes: 2 additions & 2 deletions)
@@ -32,7 +32,7 @@
# -----------------------------------------------------------------------------
#
# This implements an LR parser that is constructed from grammar rules defined
-# as Python functions. The grammer is specified by supplying the BNF inside
Owner comment: Please revert all changes in the ply/ directory - it's a vendored dependency and I don't want any changes in there
+# as Python functions. The grammar is specified by supplying the BNF inside
# Python documentation strings. The inspiration for this technique was borrowed
# from John Aycock's Spark parsing system. PLY might be viewed as cross between
# Spark and the GNU bison utility.
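As context for the comment above (not part of the PR's changes, and ply/ itself is to stay untouched per the review comment): the "BNF inside Python documentation strings" style means each p_* rule function carries its grammar production in its docstring, and yacc builds its LR tables from those. A minimal, self-contained sketch with made-up token and rule names:

    import ply.lex as lex
    import ply.yacc as yacc

    tokens = ('NUMBER', 'PLUS')

    t_PLUS = r'\+'
    t_ignore = ' '

    def t_NUMBER(t):
        r'\d+'
        t.value = int(t.value)
        return t

    def t_error(t):
        raise SyntaxError('illegal character %r' % t.value[0])

    # The BNF lives in the docstrings; yacc.yacc() reads them to build the tables.
    def p_expr_plus(p):
        'expr : expr PLUS NUMBER'
        p[0] = p[1] + p[3]

    def p_expr_number(p):
        'expr : NUMBER'
        p[0] = p[1]

    def p_error(p):
        raise SyntaxError('syntax error at %r' % (p,))

    lexer = lex.lex()
    parser = yacc.yacc(write_tables=False)
    print(parser.parse('1 + 2 + 3'))  # expected: 6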
@@ -1500,7 +1500,7 @@ def __init__(self, terminals):
self.Precedence = {} # Precedence rules for each terminal. Contains tuples of the
# form ('right',level) or ('nonassoc', level) or ('left',level)

-self.UsedPrecedence = set() # Precedence rules that were actually used by the grammer.
+self.UsedPrecedence = set() # Precedence rules that were actually used by the grammar.
# This is only used to provide error checking and to generate
# a warning about unused precedence rules.

pycparser/plyparser.py (4 changes: 2 additions & 2 deletions)
@@ -1,7 +1,7 @@
#-----------------------------------------------------------------
# plyparser.py
#
-# PLYParser class and other utilites for simplifying programming
+# PLYParser class and other utilities for simplifying programming
# parsers with PLY
#
# Eli Bendersky [https://eli.thegreenplace.net/]
@@ -53,7 +53,7 @@ def _coord(self, lineno, column=None):
column=column)

def _token_coord(self, p, token_idx):
""" Returns the coordinates for the YaccProduction objet 'p' indexed
""" Returns the coordinates for the YaccProduction object 'p' indexed
with 'token_idx'. The coordinate includes the 'lineno' and
'column'. Both follow the lex semantic, starting from 1.
"""
tests/c_files/memmgr.h (2 changes: 1 addition & 1 deletion)
@@ -49,7 +49,7 @@
// minimize pool fragmentation in case of multiple allocations
// and deallocations, it is advisable to not allocate
// blocks that are too small.
-// This flag sets the minimal ammount of quantas for
+// This flag sets the minimal amount of quantas for
// an allocation. If the size of a ulong is 4 and you
// set this flag to 16, the minimal size of an allocation
// will be 4 * 2 * 16 = 128 bytes
tests/c_files/year.c (4 changes: 2 additions & 2 deletions)
@@ -12,8 +12,8 @@ void convert(int thousands, int hundreds, int tens, int ones)
char *num[] = {"", "One", "Two", "Three", "Four", "Five", "Six",
"Seven", "Eight", "Nine"};

char *for_ten[] = {"", "", "Twenty", "Thirty", "Fourty", "Fifty", "Sixty",
"Seventy", "Eighty", "Ninty"};
char *for_ten[] = {"", "", "Twenty", "Thirty", "Forty", "Fifty", "Sixty",
"Seventy", "Eighty", "Ninety"};

char *af_ten[] = {"Ten", "Eleven", "Twelve", "Thirteen", "Fourteen",
"Fifteen", "Sixteen", "Seventeen", "Eighteen", "Ninteen"};
tests/test_c_parser.py (4 changes: 2 additions & 2 deletions)
@@ -179,7 +179,7 @@ def test_initial_semi(self):
def test_coords(self):
""" Tests the "coordinates" of parsed elements - file
name, line and column numbers, with modification
-insterted by #line directives.
+inserted by #line directives.
"""
self.assert_coord(self.parse('int a;').ext[0], 1, 5)

@@ -1171,7 +1171,7 @@ def test_anonymous_struct_union(self):
['TypeDecl',
['IdentifierType', ['float']]]]]]]])

-# ISO/IEC 9899:201x Commitee Draft 2010-11-16, N1539
+# ISO/IEC 9899:201x Committee Draft 2010-11-16, N1539
# section 6.7.2.1, par. 19, example 1
s3 = """
struct v {
utils/benchmark/inputs/sqlite-btree.c.ppout (24 changes: 12 additions & 12 deletions)
@@ -3652,7 +3652,7 @@ struct Btree {
sqlite3 *db;
BtShared *pBt;
u8 inTrans;
-u8 sharable;
Owner comment: Revert this file too - it's vendored in
+u8 shareable;
u8 locked;
u8 hasIncrblobCur;
int wantToLock;
@@ -3785,7 +3785,7 @@ static int querySharedCacheTableLock(Btree *p, Pgno iTab, u8 eLock){
assert( eLock==1 || pBt->inTransaction==2 );


-if( !p->sharable ){
+if( !p->shareable ){
return 0;
}

@@ -3830,7 +3830,7 @@ static int setSharedCacheTableLock(Btree *p, Pgno iTable, u8 eLock){



-assert( p->sharable );
+assert( p->shareable );
assert( 0==querySharedCacheTableLock(p, iTable, eLock) );


@@ -3872,7 +3872,7 @@ static void clearAllSharedCacheTableLocks(Btree *p){
BtLock **ppIter = &pBt->pLock;

assert( sqlite3BtreeHoldsMutex(p) );
-assert( p->sharable || 0==*ppIter );
+assert( p->shareable || 0==*ppIter );
assert( p->inTrans>0 );

while( *ppIter ){
@@ -5378,7 +5378,7 @@ int sqlite3BtreeOpen(
char *zFullPathname = sqlite3Malloc(((nFullPathname)>(nFilename)?(nFullPathname):(nFilename)));
sqlite3_mutex *mutexShared;

-p->sharable = 1;
+p->shareable = 1;
if( !zFullPathname ){
sqlite3_free(p);
return 7;
@@ -5498,7 +5498,7 @@ int sqlite3BtreeOpen(


pBt->nRef = 1;
-if( p->sharable ){
+if( p->shareable ){
sqlite3_mutex *mutexShared;
mutexShared = sqlite3MutexAlloc(2);
if( 1 && sqlite3Config.bCoreMutex ){
@@ -5521,11 +5521,11 @@



-if( p->sharable ){
+if( p->shareable ){
int i;
Btree *pSib;
for(i=0; i<db->nDb; i++){
-if( (pSib = db->aDb[i].pBt)!=0 && pSib->sharable ){
+if( (pSib = db->aDb[i].pBt)!=0 && pSib->shareable ){
while( pSib->pPrev ){ pSib = pSib->pPrev; }
if( (uptr)p->pBt<(uptr)pSib->pBt ){
p->pNext = pSib;
@@ -5679,7 +5679,7 @@ int sqlite3BtreeClose(Btree *p){


assert( p->wantToLock==0 && p->locked==0 );
-if( !p->sharable || removeFromSharingList(pBt) ){
+if( !p->shareable || removeFromSharingList(pBt) ){



@@ -6193,7 +6193,7 @@ int sqlite3BtreeBeginTrans(Btree *p, int wrflag, int *pSchemaVersion){
if( p->inTrans==0 ){
pBt->nTransaction++;

-if( p->sharable ){
+if( p->shareable ){
assert( p->lock.pBtree==p && p->lock.iTable==1 );
p->lock.eLock = 1;
p->lock.pNext = pBt->pLock;
@@ -11268,7 +11268,7 @@ int sqlite3BtreeSchemaLocked(Btree *p){
int sqlite3BtreeLockTable(Btree *p, int iTab, u8 isWriteLock){
int rc = 0;
assert( p->inTrans!=0 );
-if( p->sharable ){
+if( p->shareable ){
u8 lockType = 1 + isWriteLock;
assert( 1 +1==2 );
assert( isWriteLock==0 || isWriteLock==1 );
@@ -11383,7 +11383,7 @@ int sqlite3HeaderSizeBtree(void){ return (((sizeof(MemPage))+7)&~7); }


int sqlite3BtreeSharable(Btree *p){
-return p->sharable;
+return p->shareable;
}


utils/internal/zc.c (2 changes: 1 addition & 1 deletion)
@@ -12,7 +12,7 @@
void print_help(int exval);

int main (int argc, char *argv[]) {
-/* word delimeter for strtok() */
+/* word delimiter for strtok() */
char delim[] = ".,:;`/\"+-_(){}[]<>*&^%$#@!?~/|\\=1234567890 \t\n";
char line[MAXLINE]; /* input buff, fgets() */
char *stray = NULL; /* returned value by strtok() */