Browse Source

better error handling

master
wes 8 years ago
parent
commit
1134c9873f
  1. 6638
      server/assets/bundle.js
  2. 29
      server/parse.js
  3. 3
      server/rollup.config.js
  4. 12
      server/tags/test.tag
  5. 20
      server/tokenize.js

6638
server/assets/bundle.js

File diff suppressed because one or more lines are too long

29
server/parse.js

@@ -789,6 +789,11 @@ function parse(tokens) {
     return parseIf(tokens, linenum, charnum);
   }
   else if (toktype === "left_paren") {
+    if (tokens.length == 0) {
+      throw error.JSyntaxError(linenum,
+                               charnum,
+                               "Unexpected end of source");
+    }
     if (fst(tokens)[0] === "lambda") {
       tokens.pop();
       var parsed = parseLambda(tokens, linenum, charnum);
@@ -810,24 +815,14 @@ function parseFull(tokenized) {
   var ast = [];
   var typeBindings = {};
   var current;
-  try {
-    while (tokenized.length > 0) {
-      current = closure.annotate_fvs(desugarer.desugar(parse(tokenized), typeBindings));
-      ast.push(current);
-    }
-    return {
-      "ast" : ast,
-      "types" : typeBindings
-    };
-  } catch (e) {
-    if (e.stxerror !== undefined) {
-      e.stxerror();
-      console.error("Tokenization error");
-    }
-    else {
-      console.log(e.errormessage);
-    }
+  while (tokenized.length > 0) {
+    current = closure.annotate_fvs(desugarer.desugar(parse(tokenized), typeBindings));
+    ast.push(current);
   }
+  return {
+    "ast" : ast,
+    "types" : typeBindings
+  };
 }
export default { parse : function(str) { export default { parse : function(str) {

3
server/rollup.config.js

@@ -14,8 +14,7 @@ export default {
     commonjs({
       './vm.js' : ['evaluateString']
     }),
-    buble(),
-    uglify()
+    buble()
   ],
   format: 'iife'
 }

12
server/tags/test.tag

@@ -25,7 +25,17 @@ self.default = "";
   evaluate(ev) {
     ev.preventDefault();
     var input = self.refs.input;
-    self.outputs.push(JSON.stringify(vm.evaluateString(input.value)));
+    if (!input.value) {
+      return;
+    }
+    else {
+      try {
+        self.outputs.push(JSON.stringify(vm.evaluateString(input.value)));
+      }
+      catch (e) {
+        self.outputs.push(`Error: ${e.errormessage}`);
+      }
+    }
     self.refs.input.value = self.default;
     self.update();
   }

20
server/tokenize.js

@@ -384,18 +384,14 @@ function tokenize(tokstream, matchop) {
 }
 function tokenizeHelp(input, matchop, strip_whitespace) {
-  try {
-    return tokenize(input, matchop).reverse().filter(function(x) {
-      if (strip_whitespace) {
-        return x[0] !== "whitespace";
-      }
-      else {
-        return true;
-      }
-    });
-  } catch (e) {
-    console.log(e.stxerror());
-  }
+  return tokenize(input, matchop).reverse().filter(function(x) {
+    if (strip_whitespace) {
+      return x[0] !== "whitespace";
+    }
+    else {
+      return true;
+    }
+  });
 }
 var defop_pattern = ["defop", "integer", "constructor",

Loading…
Cancel
Save