@@ -6,10 +6,10 @@ var error = require("./errors.js");
 var operators = Object.keys(rep.OPInfo);
 var _ = require("underscore");

-function isDigit(a) {
-  if (!a)
+function isDigit(c) {
+  if (!c)
     return false;
-  var code = a.charCodeAt();
+  var code = c.charCodeAt();
   if (isNaN(code)) {
     return false;
   }
@@ -19,11 +19,11 @@ function isDigit(a) {
           code > 46);
 }

-function isWhitespace(a) {
-  if (!a)
+function isWhitespace(c) {
+  if (!c)
     return true;
-  var code = a.charCodeAt();
+  var code = c.charCodeAt();
   if (isNaN(code)) {
     return false;
   }
@@ -34,8 +34,8 @@ function isWhitespace(a) {
           code === 11);
 }

-function isIdentifier(a) {
-  var code = a.charCodeAt();
+function isIdentifier(c) {
+  var code = c.charCodeAt();
   return (!isNaN(code) &&
           code !== 41 &&
           code !== 40 &&
@@ -46,6 +46,13 @@ function isIdentifier(a) {
           code !== 44);
 }

+function isUpper(c) {
+  var code = c.charCodeAt();
+  return (!isNaN(code) &&
+          (code >= 65) &&
+          (code <= 90));
+}
+
 function tokenizeNum(tokstream, charnum, linenum) {
   var number = [];
   var code = tokstream[0].charCodeAt();
@@ -99,7 +106,10 @@ function tokenizeNum(tokstream, charnum, linenum) {
  * Everything after the operator goes back on to the token stream
  */
-function tokenizeIdent(tokstream, matchop, charnum, linenum) {
+function tokenizeIdent(tokstream,
+                       matchop,
+                       charnum,
+                       linenum) {
   var identifier = [];
   var n = 0;
   while ((!isWhitespace(tokstream[0])) && isIdentifier(tokstream[0]) && !matchop(tokstream)) {
@@ -113,6 +123,18 @@ function tokenizeIdent(tokstream, matchop, charnum, linenum) {
   return [[n, ["identifier", identifier, charnum, linenum]]];
 }

+function tokenizeCtor(tokstream,
+                      matchop,
+                      charnum,
+                      linenum) {
+  var ident = tokenizeIdent(tokstream,
+                            matchop,
+                            charnum,
+                            linenum);
+  ident[0][1][0] = "constructor";
+  return ident;
+}
+
 function tokenizeStr(tokstream, charnum, linenum) {
   var stringlit = [];
   var n = 1;
@@ -232,9 +254,9 @@ function tokenize(tokstream, matchop) {
         break;
       /* falls through */
-      case 45: // '-'
+      /*case 45: // '-'
         lambda = peek(tokstream, "arrow", "->");
-        if (lambda) {
+        if (false) {
           tokens.push($.extend(lambda, [charnum, linenum]));
           tokstream = tokstream.substr(2);
           break;
@@ -346,7 +368,12 @@ function tokenize(tokstream, matchop) {
           tokens.push(["identifier", op, charnum, linenum]);
         }
         else {
-          result = tokenizeIdent(tokstream, matchop, charnum, linenum);
+          if (isUpper(tokstream[0])) {
+            result = tokenizeCtor(tokstream, matchop, charnum, linenum);
+          }
+          else {
+            result = tokenizeIdent(tokstream, matchop, charnum, linenum);
+          }
           for (var index = 0; index < result.length; index++) {
             charnum++;
             tokens.push(result[index][1]);
@@ -396,5 +423,6 @@ function tokenizeFull(input) {
   return tokenizeHelp(input, matchop, true);
 }

 module.exports = {tokenize: tokenizeFull,
                   isIdentifier: isIdentifier};
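
The combined effect of the new isUpper/tokenizeCtor pair is that a token starting with an uppercase ASCII letter (char codes 65-90) is emitted with the tag "constructor" instead of "identifier", while everything else still goes through tokenizeIdent. Below is a minimal standalone sketch of that dispatch; isUpper is copied from the patch, but fakeTokenizeIdent, fakeTokenizeCtor, and the sample words are simplified illustrations, not part of the real tokenizer.

// Standalone sketch: how the uppercase check picks the token tag.
// isUpper mirrors the function added above; the rest is illustrative only.
function isUpper(c) {
  var code = c.charCodeAt();
  return (!isNaN(code) &&
          (code >= 65) &&
          (code <= 90));
}

// Simplified stand-in for tokenizeIdent: consume one word, return [[n, token]].
function fakeTokenizeIdent(str, charnum, linenum) {
  var word = str.split(/\s/)[0];
  return [[word.length, ["identifier", word, charnum, linenum]]];
}

// Same shape as tokenizeCtor: reuse the identifier path, retag as "constructor".
function fakeTokenizeCtor(str, charnum, linenum) {
  var result = fakeTokenizeIdent(str, charnum, linenum);
  result[0][1][0] = "constructor";
  return result;
}

["Just", "maybe"].forEach(function (word) {
  var result = isUpper(word[0]) ?
        fakeTokenizeCtor(word, 1, 1) :
        fakeTokenizeIdent(word, 1, 1);
  console.log(result[0][1]);
});
// logs ["constructor", "Just", 1, 1], then ["identifier", "maybe", 1, 1]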