From 02896ff849fae7e2c21cc9a624ddaf4ef697b067 Mon Sep 17 00:00:00 2001
From: nisstyre56
Date: Sat, 26 Apr 2014 04:14:11 -0400
Subject: [PATCH 1/2] update README to actually explain the current status of
 this

---
 README.md   | 19 +++++++++++++------
 tokenize.js | 15 ---------------
 2 files changed, 13 insertions(+), 21 deletions(-)

diff --git a/README.md b/README.md
index 1bb6994..69e5928 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,13 @@
-This is the beginnings of a little language based on a typed LC with let
-and mainly inspired by these:
- * http://cs.brown.edu/~sk/Publications/Books/ProgLangs/
- * http://www.cs.indiana.edu/hyplan/dfried/ts.ps (I plan on generating code in
-   trampolined style)
- * http://lucacardelli.name/papers/basictypechecking.a4.pdf
+JLambda is a functional language in the spirit of languages such as Scheme,
+SML, or Clean. It aims to have a very flexible syntax and a clean and easy to
+understand type system. Another goal is to generate very efficient JavaScript
+code and possibly native code as well. Currently the type system is still being
+designed, and the various parts that conspire to generate actual code are
+being written and will likely change quite a bit. It is possible to parse code
+and generate a pretty-printed version of it (see example.jl for what the syntax
+looks like at the moment).
+
+JLambda also aims to support concurrency, which will be built on a
+continuation-passing style intermediate language. I have not figured out how
+scheduling threads will work, or whether I will provide any programmer-directed
+way of scheduling (e.g. yield).
diff --git a/tokenize.js b/tokenize.js
index 3789446..3e7459e 100755
--- a/tokenize.js
+++ b/tokenize.js
@@ -255,21 +255,6 @@ function tokenize(tokstream, matchop) {
       tokstream = tokstream.substr(i);
       break;
 
-    /* falls through */
-    /*case 45: // '-'
-      lambda = peek(tokstream, "arrow", "->");
-      if (false) {
-        tokens.push($.extend(lambda, [charnum, linenum]));
-        tokstream = tokstream.substr(2);
-        break;
-      }
-      else {
-        tokens.push(["identifier", "-", charnum, linenum]);
-        charnum++;
-        tokstream = tokstream.substr(1);
-        break;
-      }
-      /* falls through */
     case 46: // '.'
       if (isDigit(tokstream[1])) {
-- 
2.30.2

From 0297fbd5afc38dc73c0c0bb77f82a312f023f5f2 Mon Sep 17 00:00:00 2001
From: nisstyre56
Date: Sat, 26 Apr 2014 14:42:14 -0400
Subject: [PATCH 2/2] fixed bug with defop parsing, wrong token type in the
 pattern

---
 closure_conversion.js | 1 -
 parse.js              | 7 ++++---
 tokenize.js           | 6 +++---
 3 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/closure_conversion.js b/closure_conversion.js
index 4865aea..ee60731 100644
--- a/closure_conversion.js
+++ b/closure_conversion.js
@@ -152,7 +152,6 @@ function test(src) {
 }
 
 //console.log(test(pprint.pprint(parser.parse(pprint.pprint(parser.parse("if something then if a then if b then c else d else rtrrt else some_other_thing")[0]))[0])));
-console.log(pprint.pprint(parser.parse("defop 1 Right (a $# b) (a - b) def main (4 $# b $# c)")[1]));
 module.export = {
   test : test,
   closureConvert : closure_convert_all
diff --git a/parse.js b/parse.js
index ad8846e..4f5e520 100755
--- a/parse.js
+++ b/parse.js
@@ -636,7 +636,8 @@ function parseFull(tokenized) {
 module.exports = {
   parse : function(str) {
     return parseFull(tokenizer.tokenize(str));
-  }
+  },
+  tokenize : tokenizer.tokenize
 };
-/*var istr = fs.readFileSync('/dev/stdin').toString();
-console.log(parseFull(tokenizer.tokenize(istr)).map(pprint.pprint));*/
+var istr = fs.readFileSync('/dev/stdin').toString();
+console.log(parseFull(tokenizer.tokenize(istr)).map(pprint.pprint));
diff --git a/tokenize.js b/tokenize.js
index 3e7459e..cb2d6bb 100755
--- a/tokenize.js
+++ b/tokenize.js
@@ -25,7 +25,7 @@ function isWhitespace(c) {
   var code = c.charCodeAt();
 
   if (isNaN(code)) {
-    return false;
+    return true;
   }
   return (code === 9 ||
           code === 32 ||
@@ -388,12 +388,12 @@ function tokenizeHelp(input, matchop, strip_whitespace) {
   }
 }
 
-var defop_pattern = ["defop", "integer", "identifier",
+var defop_pattern = ["defop", "integer", "constructor",
                      "left_paren", "identifier",
                      "identifier", "identifier", "right_paren"];
 
 function tokenizeFull(input) {
-  var matchop = $.opMatch(operators);
+  var matchop;
   var initialPass = tokenizeHelp(input, _.constant(false), true).reverse();
   for (var i = 0; i < initialPass.length; i++) {
     if (initialPass.slice(i, i+8).map(_.first).every(
-- 
2.30.2
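
For reference (not part of either patch above): a defop declaration such as

  defop 1 Right (a $# b) (a - b)

(the form exercised by the console.log removed from closure_conversion.js) opens with the defop keyword, an integer precedence, and a constructor (Right here, presumably marking associativity), so the third slot of defop_pattern has to be "constructor" rather than "identifier". The sketch below illustrates the eight-token window check that tokenizeFull runs against defop_pattern; the pattern itself is copied from the patch, but the token shapes and concrete values are assumptions made for the example, not the tokenizer's actual output.

// Minimal sketch, runnable with plain Node and no dependencies. Token shapes
// follow the [type, value, ...] arrays that tokenize.js pushes (for example
// ["identifier", "-", charnum, linenum]); the concrete values are assumed.
var defop_pattern = ["defop", "integer", "constructor",
                     "left_paren", "identifier",
                     "identifier", "identifier", "right_paren"];

// Assumed token window for: defop 1 Right (a $# b)
var toks = [
  ["defop", "defop"],
  ["integer", 1],
  ["constructor", "Right"],   // capitalized, so presumably a constructor token,
                              // not an identifier (the type the pattern now expects)
  ["left_paren", "("],
  ["identifier", "a"],
  ["identifier", "$#"],       // assumed: operators come out as identifiers on
                              // the initial pass, since matchop is disabled there
  ["identifier", "b"],
  ["right_paren", ")"]
];

// Same shape of check as initialPass.slice(i, i+8).map(_.first).every(...)
var matches = toks
  .map(function (tok) { return tok[0]; })
  .every(function (type, i) { return type === defop_pattern[i]; });

console.log(matches); // true with "constructor" in the third slot

With the old "identifier" entry this check presumably never matched a real defop form, which is the wrong token type the PATCH 2/2 subject refers to.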