/***********************************************************************

  A JavaScript tokenizer / parser / beautifier / compressor.
  https://github.com/mishoo/UglifyJS2

  -------------------------------- (C) ---------------------------------

                           Author: Mihai Bazon
                         <mihai.bazon@gmail.com>
                       http://mihai.bazon.net/blog

  Distributed under the BSD license:

    Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
    Parser based on parse-js (http://marijn.haverbeke.nl/parse-js/).

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions
    are met:

        * Redistributions of source code must retain the above
          copyright notice, this list of conditions and the following
          disclaimer.

        * Redistributions in binary form must reproduce the above
          copyright notice, this list of conditions and the following
          disclaimer in the documentation and/or other materials
          provided with the distribution.

    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER "AS IS" AND ANY
    EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
    PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
    LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
    OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
    TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
    THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
    SUCH DAMAGE.

 ***********************************************************************/

"use strict";

var KEYWORDS = 'break case catch class const continue debugger default delete do else export extends finally for function if in instanceof let new return switch throw try typeof var void while with';
var KEYWORDS_ATOM = 'false null true';
var RESERVED_WORDS = 'enum implements import interface package private protected public static super this ' + KEYWORDS_ATOM + " " + KEYWORDS;
var KEYWORDS_BEFORE_EXPRESSION = 'return new delete throw else case yield await';

KEYWORDS = makePredicate(KEYWORDS);
RESERVED_WORDS = makePredicate(RESERVED_WORDS);
KEYWORDS_BEFORE_EXPRESSION = makePredicate(KEYWORDS_BEFORE_EXPRESSION);
KEYWORDS_ATOM = makePredicate(KEYWORDS_ATOM);

var OPERATOR_CHARS = makePredicate(characters("+-*&%=<>!?|~^"));

var RE_NUM_LITERAL = /[0-9a-f]/i;
var RE_HEX_NUMBER = /^0x[0-9a-f]+$/i;
var RE_OCT_NUMBER = /^0[0-7]+$/;
var RE_ES6_OCT_NUMBER = /^0o[0-7]+$/i;
var RE_BIN_NUMBER = /^0b[01]+$/i;
var RE_DEC_NUMBER = /^\d*\.?\d*(?:e[+-]?\d*(?:\d\.?|\.?\d)\d*)?$/i;

var OPERATORS = makePredicate([
    "in",
    "instanceof",
    "typeof",
    "new",
    "void",
    "delete",
    "++",
    "--",
    "+",
    "-",
    "!",
    "~",
    "&",
    "|",
    "^",
    "*",
    "**",
    "/",
    "%",
    ">>",
    "<<",
    ">>>",
    "<",
    ">",
    "<=",
    ">=",
    "==",
    "===",
    "!=",
    "!==",
    "?",
    "=",
    "+=",
    "-=",
    "/=",
    "*=",
    "**=",
    "%=",
    ">>=",
    "<<=",
    ">>>=",
    "|=",
    "^=",
    "&=",
    "&&",
    "||"
]);

var WHITESPACE_CHARS = makePredicate(characters(" \u00a0\n\r\t\f\u000b\u200b\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u2028\u2029\u202f\u205f\u3000\uFEFF"));

var NEWLINE_CHARS = makePredicate(characters("\n\r\u2028\u2029"));

var PUNC_AFTER_EXPRESSION = makePredicate(characters(";]),:"));

var PUNC_BEFORE_EXPRESSION = makePredicate(characters("[{(,;:"));

var PUNC_CHARS = makePredicate(characters("[]{}(),;:"));

/* -----[ Tokenizer ]----- */

// surrogate safe regexps adapted from https://github.com/mathiasbynens/unicode-8.0.0/tree/89b412d8a71ecca9ed593d9e9fa073ab64acfebe/Binary_Property
var UNICODE = {
ID_Start: /[A-Za-z\xAA\xB5\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377\u037A-\u037D\u037F\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u048A-\u052F\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0-\u08B4\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0AF9\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C39\u0C3D\u0C58-\u0C5A\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D\u0D4E\u0D5F-\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F5\u13F8-\u13FD\u1401-\u166C\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F8\u1700-\u170C\u170E-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191E\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19B0-\u19C9\u1A00-\u1A16\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2118-\u211D\u2124\u2126\u2128\u212A-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303C\u3041-\u3096\u309B-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FD5\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B\uA640-\uA66E\uA67F-\uA69D\uA6A0-\uA6EF\uA717-\uA71F\uA722-\uA788\uA78B-\uA7AD\uA7B0-\uA7B7\uA7F7-\uA801\uA803-\uA805\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB\uA8FD\uA90A-\u
A925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uA9E0-\uA9E4\uA9E6-\uA9EF\uA9FA-\uA9FE\uAA00-\uAA28\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA7E-\uAAAF\uAAB1\uAAB5\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E\uAB30-\uAB5A\uAB5C-\uAB65\uAB70-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3
ID_Continue: /[0-9A-Z_a-z\xAA\xB5\xB7\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0300-\u0374\u0376\u0377\u037A-\u037D\u037F\u0386-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u0483-\u0487\u048A-\u052F\u0531-\u0556\u0559\u0561-\u0587\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u05D0-\u05EA\u05F0-\u05F2\u0610-\u061A\u0620-\u0669\u066E-\u06D3\u06D5-\u06DC\u06DF-\u06E8\u06EA-\u06FC\u06FF\u0710-\u074A\u074D-\u07B1\u07C0-\u07F5\u07FA\u0800-\u082D\u0840-\u085B\u08A0-\u08B4\u08E3-\u0963\u0966-\u096F\u0971-\u0983\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BC-\u09C4\u09C7\u09C8\u09CB-\u09CE\u09D7\u09DC\u09DD\u09DF-\u09E3\u09E6-\u09F1\u0A01-\u0A03\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A59-\u0A5C\u0A5E\u0A66-\u0A75\u0A81-\u0A83\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABC-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AD0\u0AE0-\u0AE3\u0AE6-\u0AEF\u0AF9\u0B01-\u0B03\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3C-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B5C\u0B5D\u0B5F-\u0B63\u0B66-\u0B6F\u0B71\u0B82\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD0\u0BD7\u0BE6-\u0BEF\u0C00-\u0C03\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C39\u0C3D-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C58-\u0C5A\u0C60-\u0C63\u0C66-\u0C6F\u0C81-\u0C83\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBC-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CDE\u0CE0-\u0CE3\u0CE6-\u0CEF\u0CF1\u0CF2\u0D01-\u0D03\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D-\u0D44\u0D46-\u0D48\u0D4A-\u0D4E\u0D57\u0D5F-\u0D63\u0D66-\u0D6F\u0D7A-\u0D7F\u0D82\u0D83\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DE6-\u0DEF\u0DF2\u0DF3\u0E01-\u0E3A\u0E40-\u0E4E\u0E50-\u0E59\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB9\u0EBB-\u0EBD\u0EC0-\u0EC4\u0EC6\u0EC8-\u0ECD\u0ED0-\u0ED9\u0EDC-\u0EDF\u0F00\u0F18\u0F19\u0F20-\u0F29\u0F35\u0F37\u0F39\u0F3E-\u0F47\u0F49-\u0F6C\u0F71-\u0F84\u0F86-\u0F97\u0F99-\u0FBC\u0FC6\u1000-\u1049\u1050-\u109D\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u135D-\u135F\u1369-\u1371\u1380-\u138F\u13A0-\u13F5\u13F8-\u13FD\u1401-\u166C\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F8\u1700-\u170C\u170E-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176C\u176E-\u1770\u1772\u1773\u1780-\u17D3\u17D7\u17DC\u17DD\u17E0-\u17E9\u180B-\u180D\u1810-\u1819\u1820-\u1877\u1880-\u18AA\u18B0-\u18F5\u1900-\u191E\u1920-\u192B\u1930-\u193B\u1946-\u196D\u1970-\u1974\u1980-\u19AB\u19B0-\u19C9\u19D0-\u19DA\u1A00-\u1A1B\u1A20-\u1A5E\u1A60-\u1A7C\u1A7F-\u1A89\u1A90-\u1A99\u1AA7\u1AB0-\u1ABD\u1B00-\u1B4B\u1B50-\u1B59\u1B6B-\u1B73\u1B80-\u1BF3\u1C00-\u1C37\u1C40-\u1C49\u1C4D-\u1C7D\u1CD0-\u1CD2\u1CD4-\u1CF6\u1CF8\u1CF9\u1D00-\u1DF5\u1DFC-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u203F\u2040\u2054\u2071\u207F\u2090-\u209C\u20D0-\u20DC\
u20E1\u20E5-\u20F0\u2102\u2107\u210A-\u2113\u2115\u2118-\u211D\u2124\u2126\u2128\u212A-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D7F-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u2DE0-\u2DFF\u3005-\u3007\u3021-\u302F\u3031-\u3035\u3038-\u303C\u3041-\u3096\u3099-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4
};

function get_full_char(str, pos) {
    var char = str.charAt(pos);
    if (is_surrogate_pair_head(char)) {
        var next = str.charAt(pos + 1);
        if (is_surrogate_pair_tail(next)) {
            return char + next;
        }
    }
    if (is_surrogate_pair_tail(char)) {
        var prev = str.charAt(pos - 1);
        if (is_surrogate_pair_head(prev)) {
            return prev + char;
        }
    }
    return char;
}

function get_full_char_code(str, pos) {
    // https://en.wikipedia.org/wiki/Universal_Character_Set_characters#Surrogates
    if (is_surrogate_pair_head(str.charAt(pos))) {
        return 0x10000 + (str.charCodeAt(pos) - 0xd800 << 10) + str.charCodeAt(pos + 1) - 0xdc00;
    }
    return str.charCodeAt(pos);
}
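// For illustration: a surrogate pair decodes as
//   0x10000 + ((high - 0xD800) << 10) + (low - 0xDC00)
// e.g. "\uD83D\uDE00" (U+1F600):
//   0x10000 + ((0xD83D - 0xD800) << 10) + (0xDE00 - 0xDC00)
//   = 0x10000 + 0xF400 + 0x200 = 0x1F600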

function get_full_char_length(str) {
    var surrogates = 0;

    for (var i = 0; i < str.length; i++) {
        if (is_surrogate_pair_head(str.charCodeAt(i))) {
            if (is_surrogate_pair_tail(str.charCodeAt(i + 1))) {
                surrogates++;
                i++;
            }
        }
    }

    return str.length - surrogates;
}

function from_char_code(code) {
    // Based on https://github.com/mathiasbynens/String.fromCodePoint/blob/master/fromcodepoint.js
    if (code > 0xFFFF) {
        code -= 0x10000;
        return (String.fromCharCode((code >> 10) + 0xD800) +
            String.fromCharCode((code % 0x400) + 0xDC00));
    }
    return String.fromCharCode(code);
}

function is_surrogate_pair_head(code) {
    if (typeof code === "string")
        code = code.charCodeAt(0);

    return code >= 0xd800 && code <= 0xdbff;
}

function is_surrogate_pair_tail(code) {
    if (typeof code === "string")
        code = code.charCodeAt(0);
    return code >= 0xdc00 && code <= 0xdfff;
}

function is_digit(code) {
    return code >= 48 && code <= 57;
};
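// For illustration, from_char_code() is the inverse of get_full_char_code():
//   from_char_code(0x41)    === "A"
//   from_char_code(0x1F600) === "\uD83D\uDE00"  (split into surrogate halves)
// so get_full_char_code(from_char_code(cp), 0) === cp for any valid code point.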

function is_identifier(name) {
    if (typeof name !== "string" || RESERVED_WORDS(name))
        return false;

    return true;
};

function is_identifier_start(ch) {
    var code = ch.charCodeAt(0);
    return UNICODE.ID_Start.test(ch) || code == 36 || code == 95;
};

function is_identifier_char(ch) {
    var code = ch.charCodeAt(0);
    return UNICODE.ID_Continue.test(ch)
        || code == 36
        || code == 95
        || code == 8204 // \u200c: zero-width non-joiner <ZWNJ>
        || code == 8205 // \u200d: zero-width joiner <ZWJ> (in my ECMA-262 PDF, this is also 200c)
    ;
};

function is_identifier_string(str){
    return /^[a-z_$][a-z0-9_$]*$/i.test(str);
};

function parse_js_number(num) {
    if (RE_HEX_NUMBER.test(num)) {
        return parseInt(num.substr(2), 16);
    } else if (RE_OCT_NUMBER.test(num)) {
        return parseInt(num.substr(1), 8);
    } else if (RE_ES6_OCT_NUMBER.test(num)) {
        return parseInt(num.substr(2), 8);
    } else if (RE_BIN_NUMBER.test(num)) {
        return parseInt(num.substr(2), 2);
    } else if (RE_DEC_NUMBER.test(num)) {
        return parseFloat(num);
    } else {
        var val = parseFloat(num);
        if (val == num) return val;
    }
};
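// For illustration, parse_js_number() maps each literal form the tokenizer
// produces to its numeric value, e.g.:
//   parse_js_number("0x1A")  === 26    // hex
//   parse_js_number("0o17")  === 15    // ES6 octal
//   parse_js_number("017")   === 15    // legacy octal
//   parse_js_number("0b101") === 5     // binary
//   parse_js_number("1e3")   === 1000  // decimal with exponent
// Anything it cannot parse comes back as undefined/NaN, which read_num()
// reports as "Invalid syntax".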

function JS_Parse_Error(message, filename, line, col, pos) {
    this.message = message;
    this.filename = filename;
    this.line = line;
    this.col = col;
    this.pos = pos;
};
JS_Parse_Error.prototype = Object.create(Error.prototype);
JS_Parse_Error.prototype.constructor = JS_Parse_Error;
JS_Parse_Error.prototype.name = "SyntaxError";
configure_error_stack(JS_Parse_Error);

function js_error(message, filename, line, col, pos) {
    throw new JS_Parse_Error(message, filename, line, col, pos);
};

function is_token(token, type, val) {
    return token.type == type && (val == null || token.value == val);
};

var EX_EOF = {};

function tokenizer($TEXT, filename, html5_comments, shebang) {
|
||
|
|
||
|
var S = {
|
||
|
text : $TEXT,
|
||
|
filename : filename,
|
||
|
pos : 0,
|
||
|
tokpos : 0,
|
||
|
line : 1,
|
||
|
tokline : 0,
|
||
|
col : 0,
|
||
|
tokcol : 0,
|
||
|
newline_before : false,
|
||
|
regex_allowed : false,
|
||
|
brace_counter : 0,
|
||
|
template_braces : [],
|
||
|
comments_before : [],
|
||
|
directives : {},
|
||
|
directive_stack : []
|
||
|
};
|
||
|
|
||
|
function peek() { return get_full_char(S.text, S.pos); };
|
||
|
|
||
|
function next(signal_eof, in_string) {
|
||
|
var ch = get_full_char(S.text, S.pos++);
|
||
|
if (signal_eof && !ch)
|
||
|
throw EX_EOF;
|
||
|
if (NEWLINE_CHARS(ch)) {
|
||
|
S.newline_before = S.newline_before || !in_string;
|
||
|
++S.line;
|
||
|
S.col = 0;
|
||
|
if (!in_string && ch == "\r" && peek() == "\n") {
|
||
|
// treat a \r\n sequence as a single \n
|
||
|
++S.pos;
|
||
|
ch = "\n";
|
||
|
}
|
||
|
} else {
|
||
|
if (ch.length > 1) {
|
||
|
++S.pos;
|
||
|
++S.col;
|
||
|
}
|
||
|
++S.col;
|
||
|
}
|
||
|
return ch;
|
||
|
};
|
||
|
|
||
|
function forward(i) {
|
||
|
while (i-- > 0) next();
|
||
|
};
|
||
|
|
||
|
function looking_at(str) {
|
||
|
return S.text.substr(S.pos, str.length) == str;
|
||
|
};
|
||
|
|
||
|
function find_eol() {
|
||
|
var text = S.text;
|
||
|
for (var i = S.pos, n = S.text.length; i < n; ++i) {
|
||
|
var ch = text[i];
|
||
|
if (NEWLINE_CHARS(ch))
|
||
|
return i;
|
||
|
}
|
||
|
return -1;
|
||
|
};
|
||
|
|
||
|
function find(what, signal_eof) {
|
||
|
var pos = S.text.indexOf(what, S.pos);
|
||
|
if (signal_eof && pos == -1) throw EX_EOF;
|
||
|
return pos;
|
||
|
};
|
||
|
|
||
|
function start_token() {
|
||
|
S.tokline = S.line;
|
||
|
S.tokcol = S.col;
|
||
|
S.tokpos = S.pos;
|
||
|
};
|
||
|
|
||
|
var prev_was_dot = false;
|
||
|
function token(type, value, is_comment) {
|
||
|
S.regex_allowed = ((type == "operator" && !UNARY_POSTFIX(value)) ||
|
||
|
(type == "keyword" && KEYWORDS_BEFORE_EXPRESSION(value)) ||
|
||
|
(type == "punc" && PUNC_BEFORE_EXPRESSION(value))) ||
|
||
|
(type == "arrow");
|
||
|
if (type == "punc" && value == ".") {
|
||
|
prev_was_dot = true;
|
||
|
} else if (!is_comment) {
|
||
|
prev_was_dot = false;
|
||
|
}
|
||
|
var ret = {
|
||
|
type : type,
|
||
|
value : value,
|
||
|
line : S.tokline,
|
||
|
col : S.tokcol,
|
||
|
pos : S.tokpos,
|
||
|
endline : S.line,
|
||
|
endcol : S.col,
|
||
|
endpos : S.pos,
|
||
|
nlb : S.newline_before,
|
||
|
file : filename
|
||
|
};
|
||
|
if (/^(?:num|string|regexp)$/i.test(type)) {
|
||
|
ret.raw = $TEXT.substring(ret.pos, ret.endpos);
|
||
|
}
|
||
|
if (!is_comment) {
|
||
|
ret.comments_before = S.comments_before;
|
||
|
ret.comments_after = S.comments_before = [];
|
||
|
}
|
||
|
S.newline_before = false;
|
||
|
return new AST_Token(ret);
|
||
|
};
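// For illustration, tokenizing the input `foo` yields an AST_Token roughly like:
//   { type: "name", value: "foo", line: 1, col: 0, pos: 0,
//     endline: 1, endcol: 3, endpos: 3, nlb: false, file: filename,
//     comments_before: [], comments_after: [] }
// `raw` is only attached for num/string/regexp tokens, and comment tokens do
// not collect comments_before/comments_after of their own.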
|
||
|
|
||
|
function skip_whitespace() {
|
||
|
while (WHITESPACE_CHARS(peek()))
|
||
|
next();
|
||
|
};
|
||
|
|
||
|
function read_while(pred) {
|
||
|
var ret = "", ch, i = 0;
|
||
|
while ((ch = peek()) && pred(ch, i++))
|
||
|
ret += next();
|
||
|
return ret;
|
||
|
};
|
||
|
|
||
|
function parse_error(err) {
|
||
|
js_error(err, filename, S.tokline, S.tokcol, S.tokpos);
|
||
|
};
|
||
|
|
||
|
function read_num(prefix) {
|
||
|
var has_e = false, after_e = false, has_x = false, has_dot = prefix == ".";
|
||
|
var num = read_while(function(ch, i){
|
||
|
var code = ch.charCodeAt(0);
|
||
|
switch (code) {
|
||
|
case 98: case 66: // bB
|
||
|
return (has_x = true); // Can occur in hex sequence, don't return false yet
|
||
|
case 111: case 79: // oO
|
||
|
case 120: case 88: // xX
|
||
|
return has_x ? false : (has_x = true);
|
||
|
case 101: case 69: // eE
|
||
|
return has_x ? true : has_e ? false : (has_e = after_e = true);
|
||
|
case 45: // -
|
||
|
return after_e || (i == 0 && !prefix);
|
||
|
case 43: // +
|
||
|
return after_e;
|
||
|
case (after_e = false, 46): // .
|
||
|
return (!has_dot && !has_x && !has_e) ? (has_dot = true) : false;
|
||
|
}
|
||
|
return RE_NUM_LITERAL.test(ch);
|
||
|
});
|
||
|
if (prefix) num = prefix + num;
|
||
|
if (RE_OCT_NUMBER.test(num) && next_token.has_directive("use strict")) {
|
||
|
parse_error("Legacy octal literals are not allowed in strict mode");
|
||
|
}
|
||
|
var valid = parse_js_number(num);
|
||
|
if (!isNaN(valid)) {
|
||
|
return token("num", valid);
|
||
|
} else {
|
||
|
parse_error("Invalid syntax: " + num);
|
||
|
}
|
||
|
};
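// For illustration, read_num() consumes literals such as:
//   123   1e3   1e-3   .5   0x1A   0o17   0b101   017
// The sign characters "+"/"-" are only accepted immediately after an exponent
// marker (e.g. "1e-3"), and legacy octals like 017 are rejected once a
// "use strict" directive is in effect.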
|
||
|
|
||
|
function read_escaped_char(in_string) {
|
||
|
var ch = next(true, in_string);
|
||
|
switch (ch.charCodeAt(0)) {
|
||
|
case 110 : return "\n";
|
||
|
case 114 : return "\r";
|
||
|
case 116 : return "\t";
|
||
|
case 98 : return "\b";
|
||
|
case 118 : return "\u000b"; // \v
|
||
|
case 102 : return "\f";
|
||
|
case 120 : return String.fromCharCode(hex_bytes(2)); // \x
|
||
|
case 117 : // \u
|
||
|
if (peek() == "{") {
|
||
|
next(true);
|
||
|
if (peek() === "}")
|
||
|
parse_error("Expecting hex-character between {}");
|
||
|
while (peek() == "0") next(true); // No significance
|
||
|
var result, length = find("}", true) - S.pos;
|
||
|
// Avoid 32 bit integer overflow (1 << 32 === 1)
|
||
|
// We know first character isn't 0 and thus out of range anyway
|
||
|
if (length > 6 || (result = hex_bytes(length)) > 0x10FFFF) {
|
||
|
parse_error("Unicode reference out of bounce");
|
||
|
}
|
||
|
next(true);
|
||
|
return from_char_code(result);
|
||
|
}
|
||
|
return String.fromCharCode(hex_bytes(4));
|
||
|
case 10 : return ""; // newline
|
||
|
case 13 : // \r
|
||
|
if (peek() == "\n") { // DOS newline
|
||
|
next(true, in_string);
|
||
|
return "";
|
||
|
}
|
||
|
}
|
||
|
if (ch >= "0" && ch <= "7")
|
||
|
return read_octal_escape_sequence(ch);
|
||
|
return ch;
|
||
|
};
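// For illustration, read_escaped_char() maps escape sequences to the
// characters they denote:
//   \n \r \t \b \v \f          -> the usual control characters
//   \x41                       -> "A"            (2 hex digits)
//   \u0041                     -> "A"            (4 hex digits)
//   \u{1F600}                  -> "\uD83D\uDE00" (code point form, <= 0x10FFFF)
//   \101                       -> "A"            (legacy octal, sloppy mode only)
//   backslash before a newline -> ""             (line continuation in strings)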
|
||
|
|
||
|
function read_octal_escape_sequence(ch) {
|
||
|
// Read
|
||
|
var p = peek();
|
||
|
if (p >= "0" && p <= "7") {
|
||
|
ch += next(true);
|
||
|
if (ch[0] <= "3" && (p = peek()) >= "0" && p <= "7")
|
||
|
ch += next(true);
|
||
|
}
|
||
|
|
||
|
// Parse
|
||
|
if (ch === "0") return "\0";
|
||
|
if (ch.length > 0 && next_token.has_directive("use strict"))
|
||
|
parse_error("Legacy octal escape sequences are not allowed in strict mode");
|
||
|
return String.fromCharCode(parseInt(ch, 8));
|
||
|
}
|
||
|
|
||
|
function hex_bytes(n) {
|
||
|
var num = 0;
|
||
|
for (; n > 0; --n) {
|
||
|
var digit = parseInt(next(true), 16);
|
||
|
if (isNaN(digit))
|
||
|
parse_error("Invalid hex-character pattern in string");
|
||
|
num = (num << 4) | digit;
|
||
|
}
|
||
|
return num;
|
||
|
};
|
||
|
|
||
|
var read_string = with_eof_error("Unterminated string constant", function(quote_char){
|
||
|
var quote = next(), ret = "";
|
||
|
for (;;) {
|
||
|
var ch = next(true, true);
|
||
|
if (ch == "\\") ch = read_escaped_char(true);
|
||
|
else if (NEWLINE_CHARS(ch)) parse_error("Unterminated string constant");
|
||
|
else if (ch == quote) break;
|
||
|
ret += ch;
|
||
|
}
|
||
|
var tok = token("string", ret);
|
||
|
tok.quote = quote_char;
|
||
|
return tok;
|
||
|
});
|
||
|
|
||
|
var read_template_characters = with_eof_error("Unterminated template", function(begin){
|
||
|
if (begin) {
|
||
|
S.template_braces.push(S.brace_counter);
|
||
|
}
|
||
|
var content = "", raw = "", ch, tok;
|
||
|
next(true, true);
|
||
|
while ((ch = next(true, true)) != "`") {
|
||
|
if (ch == "\r") {
|
||
|
if (peek() == "\n") ++S.pos;
|
||
|
ch = "\n";
|
||
|
} else if (ch == "$" && peek() == "{") {
|
||
|
next(true, true);
|
||
|
S.brace_counter++;
|
||
|
tok = token(begin ? "template_head" : "template_substitution", content);
|
||
|
tok.begin = begin;
|
||
|
tok.raw = raw;
|
||
|
tok.end = false;
|
||
|
return tok;
|
||
|
}
|
||
|
|
||
|
raw += ch;
|
||
|
if (ch == "\\") {
|
||
|
var tmp = S.pos;
|
||
|
ch = read_escaped_char();
|
||
|
raw += S.text.substr(tmp, S.pos - tmp);
|
||
|
}
|
||
|
|
||
|
content += ch;
|
||
|
}
|
||
|
S.template_braces.pop();
|
||
|
tok = token(begin ? "template_head" : "template_substitution", content);
|
||
|
tok.begin = begin;
|
||
|
tok.raw = raw;
|
||
|
tok.end = true;
|
||
|
return tok;
|
||
|
});
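// For illustration, the template literal `a${x}b` produces:
//   template_head         value "a", end == false  (stops at "${")
//   ...ordinary tokens for the expression x...
//   template_substitution value "b", end == true   (resumes at the matching "}")
// S.template_braces remembers the brace_counter depth at which each "${" was
// opened, so next_token() knows which "}" re-enters template parsing.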
|
||
|
|
||
|
function skip_line_comment(type) {
|
||
|
var regex_allowed = S.regex_allowed;
|
||
|
var i = find_eol(), ret;
|
||
|
if (i == -1) {
|
||
|
ret = S.text.substr(S.pos);
|
||
|
S.pos = S.text.length;
|
||
|
} else {
|
||
|
ret = S.text.substring(S.pos, i);
|
||
|
S.pos = i;
|
||
|
}
|
||
|
S.col = S.tokcol + (S.pos - S.tokpos);
|
||
|
S.comments_before.push(token(type, ret, true));
|
||
|
S.regex_allowed = regex_allowed;
|
||
|
return next_token;
|
||
|
};
|
||
|
|
||
|
var skip_multiline_comment = with_eof_error("Unterminated multiline comment", function(){
|
||
|
var regex_allowed = S.regex_allowed;
|
||
|
var i = find("*/", true);
|
||
|
var text = S.text.substring(S.pos, i).replace(/\r\n|\r|\u2028|\u2029/g, '\n');
|
||
|
// update stream position
|
||
|
forward(get_full_char_length(text) /* text length doesn't count \r\n as 2 char while S.pos - i does */ + 2);
|
||
|
S.comments_before.push(token("comment2", text, true));
|
||
|
S.newline_before = S.newline_before || text.indexOf("\n") >= 0;
|
||
|
S.regex_allowed = regex_allowed;
|
||
|
return next_token;
|
||
|
});
|
||
|
|
||
|
var read_name = with_eof_error("Unterminated identifier name", function() {
|
||
|
var name = "", ch, escaped = false, hex;
|
||
|
var read_escaped_identifier_char = function() {
|
||
|
escaped = true;
|
||
|
next();
|
||
|
if (peek() !== "u") {
|
||
|
parse_error("Expecting UnicodeEscapeSequence -- uXXXX or u{XXXX}");
|
||
|
}
|
||
|
return read_escaped_char();
|
||
|
}
|
||
|
|
||
|
// Read first character (ID_Start)
|
||
|
if ((name = peek()) === "\\") {
|
||
|
name = read_escaped_identifier_char();
|
||
|
if (!is_identifier_start(name)) {
|
||
|
parse_error("First identifier char is an invalid identifier char");
|
||
|
}
|
||
|
} else if (is_identifier_start(name)){
|
||
|
next();
|
||
|
} else {
|
||
|
return "";
|
||
|
}
|
||
|
|
||
|
// Read ID_Continue
|
||
|
while ((ch = peek()) != null) {
|
||
|
if ((ch = peek()) === "\\") {
|
||
|
ch = read_escaped_identifier_char();
|
||
|
if (!is_identifier_char(ch)) {
|
||
|
parse_error("Invalid escaped identifier char");
|
||
|
}
|
||
|
} else {
|
||
|
if (!is_identifier_char(ch)) {
|
||
|
break;
|
||
|
}
|
||
|
next();
|
||
|
}
|
||
|
name += ch;
|
||
|
}
|
||
|
if (RESERVED_WORDS(name) && escaped) {
|
||
|
parse_error("Escaped characters are not allowed in keywords");
|
||
|
}
|
||
|
return name;
|
||
|
});
|
||
|
|
||
|
var read_regexp = with_eof_error("Unterminated regular expression", function(source) {
|
||
|
var prev_backslash = false, ch, in_class = false;
|
||
|
while ((ch = next(true))) if (NEWLINE_CHARS(ch)) {
|
||
|
parse_error("Unexpected line terminator");
|
||
|
} else if (prev_backslash) {
|
||
|
source += "\\" + ch;
|
||
|
prev_backslash = false;
|
||
|
} else if (ch == "[") {
|
||
|
in_class = true;
|
||
|
source += ch;
|
||
|
} else if (ch == "]" && in_class) {
|
||
|
in_class = false;
|
||
|
source += ch;
|
||
|
} else if (ch == "/" && !in_class) {
|
||
|
break;
|
||
|
} else if (ch == "\\") {
|
||
|
prev_backslash = true;
|
||
|
} else {
|
||
|
source += ch;
|
||
|
}
|
||
|
var mods = read_name();
|
||
|
try {
|
||
|
var regexp = new RegExp(source, mods);
|
||
|
regexp.raw_source = source;
|
||
|
return token("regexp", regexp);
|
||
|
} catch(e) {
|
||
|
parse_error(e.message);
|
||
|
}
|
||
|
});
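// For illustration, in_class prevents a "/" inside a character class from
// terminating the literal: /[/]/ is read as a single regexp (matching "/"),
// while prev_backslash keeps escaped characters such as \/ and \[ from
// toggling the class or ending the pattern.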
|
||
|
|
||
|
function read_operator(prefix) {
|
||
|
function grow(op) {
|
||
|
if (!peek()) return op;
|
||
|
var bigger = op + peek();
|
||
|
if (OPERATORS(bigger)) {
|
||
|
next();
|
||
|
return grow(bigger);
|
||
|
} else {
|
||
|
return op;
|
||
|
}
|
||
|
};
|
||
|
return token("operator", grow(prefix || next()));
|
||
|
};
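// For illustration, grow() is greedy ("maximal munch"): given the input
// ">>>=1", it extends ">" to ">>", ">>>" and finally ">>>=" because each
// longer string is still in OPERATORS, then stops and returns a single
// ">>>=" operator token.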
|
||
|
|
||
|
function handle_slash() {
|
||
|
next();
|
||
|
switch (peek()) {
|
||
|
case "/":
|
||
|
next();
|
||
|
return skip_line_comment("comment1");
|
||
|
case "*":
|
||
|
next();
|
||
|
return skip_multiline_comment();
|
||
|
}
|
||
|
return S.regex_allowed ? read_regexp("") : read_operator("/");
|
||
|
};
|
||
|
|
||
|
function handle_eq_sign() {
|
||
|
next();
|
||
|
if (peek() === ">") {
|
||
|
next();
|
||
|
return token("arrow", "=>");
|
||
|
} else {
|
||
|
return read_operator("=");
|
||
|
}
|
||
|
};
|
||
|
|
||
|
function handle_dot() {
|
||
|
next();
|
||
|
if (is_digit(peek().charCodeAt(0))) {
|
||
|
return read_num(".");
|
||
|
}
|
||
|
if (peek() === ".") {
|
||
|
next(); // Consume second dot
|
||
|
next(); // Consume third dot
|
||
|
return token("expand", "...");
|
||
|
}
|
||
|
|
||
|
return token("punc", ".");
|
||
|
};
|
||
|
|
||
|
function read_word() {
|
||
|
var word = read_name();
|
||
|
if (prev_was_dot) return token("name", word);
|
||
|
return KEYWORDS_ATOM(word) ? token("atom", word)
|
||
|
: !KEYWORDS(word) ? token("name", word)
|
||
|
: OPERATORS(word) ? token("operator", word)
|
||
|
: token("keyword", word);
|
||
|
};
|
||
|
|
||
|
function with_eof_error(eof_error, cont) {
|
||
|
return function(x) {
|
||
|
try {
|
||
|
return cont(x);
|
||
|
} catch(ex) {
|
||
|
if (ex === EX_EOF) parse_error(eof_error);
|
||
|
else throw ex;
|
||
|
}
|
||
|
};
|
||
|
};
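// For illustration: next(true) throws the EX_EOF sentinel when the input runs
// out, and with_eof_error() turns that sentinel into a positioned parse_error
// such as "Unterminated string constant", while letting any other exception
// propagate unchanged.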
|
||
|
|
||
|
function next_token(force_regexp) {
|
||
|
if (force_regexp != null)
|
||
|
return read_regexp(force_regexp);
|
||
|
if (shebang && S.pos == 0 && looking_at("#!")) {
|
||
|
start_token();
|
||
|
forward(2);
|
||
|
skip_line_comment("comment5");
|
||
|
}
|
||
|
for (;;) {
|
||
|
skip_whitespace();
|
||
|
start_token();
|
||
|
if (html5_comments) {
|
||
|
if (looking_at("<!--")) {
|
||
|
forward(4);
|
||
|
skip_line_comment("comment3");
|
||
|
continue;
|
||
|
}
|
||
|
if (looking_at("-->") && S.newline_before) {
|
||
|
forward(3);
|
||
|
skip_line_comment("comment4");
|
||
|
continue;
|
||
|
}
|
||
|
}
|
||
|
var ch = peek();
|
||
|
if (!ch) return token("eof");
|
||
|
var code = ch.charCodeAt(0);
|
||
|
switch (code) {
|
||
|
case 34: case 39: return read_string(ch);
|
||
|
case 46: return handle_dot();
|
||
|
case 47: {
|
||
|
var tok = handle_slash();
|
||
|
if (tok === next_token) continue;
|
||
|
return tok;
|
||
|
}
|
||
|
case 61: return handle_eq_sign();
|
||
|
case 96: return read_template_characters(true);
|
||
|
case 123:
|
||
|
S.brace_counter++;
|
||
|
break;
|
||
|
case 125:
|
||
|
S.brace_counter--;
|
||
|
if (S.template_braces.length > 0
|
||
|
&& S.template_braces[S.template_braces.length - 1] === S.brace_counter)
|
||
|
return read_template_characters(false);
|
||
|
break;
|
||
|
}
|
||
|
if (is_digit(code)) return read_num();
|
||
|
if (PUNC_CHARS(ch)) return token("punc", next());
|
||
|
if (OPERATOR_CHARS(ch)) return read_operator();
|
||
|
if (code == 92 || is_identifier_start(ch)) return read_word();
|
||
|
break;
|
||
|
}
|
||
|
parse_error("Unexpected character '" + ch + "'");
|
||
|
};
|
||
|
|
||
|
next_token.next = next;
|
||
|
next_token.peek = peek;
|
||
|
|
||
|
next_token.context = function(nc) {
|
||
|
if (nc) S = nc;
|
||
|
return S;
|
||
|
};
|
||
|
|
||
|
next_token.add_directive = function(directive) {
|
||
|
S.directive_stack[S.directive_stack.length - 1].push(directive);
|
||
|
|
||
|
if (S.directives[directive] === undefined) {
|
||
|
S.directives[directive] = 1;
|
||
|
} else {
|
||
|
S.directives[directive]++;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
next_token.push_directives_stack = function() {
|
||
|
S.directive_stack.push([]);
|
||
|
}
|
||
|
|
||
|
next_token.pop_directives_stack = function() {
|
||
|
var directives = S.directive_stack[S.directive_stack.length - 1];
|
||
|
|
||
|
for (var i = 0; i < directives.length; i++) {
|
||
|
S.directives[directives[i]]--;
|
||
|
}
|
||
|
|
||
|
S.directive_stack.pop();
|
||
|
}
|
||
|
|
||
|
next_token.has_directive = function(directive) {
|
||
|
return S.directives[directive] > 0;
|
||
|
}
|
||
|
|
||
|
return next_token;
|
||
|
|
||
|
};
|
||
|
|
||
|
/* -----[ Parser (constants) ]----- */
|
||
|
|
||
|
var UNARY_PREFIX = makePredicate([
|
||
|
"typeof",
|
||
|
"void",
|
||
|
"delete",
|
||
|
"--",
|
||
|
"++",
|
||
|
"!",
|
||
|
"~",
|
||
|
"-",
|
||
|
"+"
|
||
|
]);
|
||
|
|
||
|
var UNARY_POSTFIX = makePredicate([ "--", "++" ]);
|
||
|
|
||
|
var ASSIGNMENT = makePredicate([ "=", "+=", "-=", "/=", "*=", "**=", "%=", ">>=", "<<=", ">>>=", "|=", "^=", "&=" ]);
|
||
|
|
||
|
var PRECEDENCE = (function(a, ret){
|
||
|
for (var i = 0; i < a.length; ++i) {
|
||
|
var b = a[i];
|
||
|
for (var j = 0; j < b.length; ++j) {
|
||
|
ret[b[j]] = i + 1;
|
||
|
}
|
||
|
}
|
||
|
return ret;
|
||
|
})(
|
||
|
[
|
||
|
["||"],
|
||
|
["&&"],
|
||
|
["|"],
|
||
|
["^"],
|
||
|
["&"],
|
||
|
["==", "===", "!=", "!=="],
|
||
|
["<", ">", "<=", ">=", "in", "instanceof"],
|
||
|
[">>", "<<", ">>>"],
|
||
|
["+", "-"],
|
||
|
["*", "/", "%"],
|
||
|
["**"]
|
||
|
],
|
||
|
{}
|
||
|
);
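// For illustration, the resulting table maps each binary operator to its
// binding strength (higher binds tighter):
//   "||" -> 1, "&&" -> 2, "|" -> 3, "^" -> 4, "&" -> 5,
//   "==" ... "!==" -> 6, "<" ... "instanceof" -> 7, shifts -> 8,
//   "+"/"-" -> 9, "*"/"/"/"%" -> 10, "**" -> 11
// so "a + b * c" parses as "a + (b * c)".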
|
||
|
|
||
|
var ATOMIC_START_TOKEN = makePredicate([ "atom", "num", "string", "regexp", "name" ]);
|
||
|
|
||
|
/* -----[ Parser ]----- */
|
||
|
|
||
|
function parse($TEXT, options) {
|
||
|
|
||
|
options = defaults(options, {
|
||
|
bare_returns : false,
|
||
|
ecma : 8,
|
||
|
expression : false,
|
||
|
filename : null,
|
||
|
html5_comments : true,
|
||
|
shebang : true,
|
||
|
strict : false,
|
||
|
toplevel : null,
|
||
|
}, true);
|
||
|
|
||
|
var S = {
|
||
|
input : (typeof $TEXT == "string"
|
||
|
? tokenizer($TEXT, options.filename,
|
||
|
options.html5_comments, options.shebang)
|
||
|
: $TEXT),
|
||
|
token : null,
|
||
|
prev : null,
|
||
|
peeked : null,
|
||
|
in_function : 0,
|
||
|
in_async : -1,
|
||
|
in_generator : -1,
|
||
|
in_directives : true,
|
||
|
in_loop : 0,
|
||
|
labels : []
|
||
|
};
|
||
|
|
||
|
S.token = next();
|
||
|
|
||
|
function is(type, value) {
|
||
|
return is_token(S.token, type, value);
|
||
|
};
|
||
|
|
||
|
function peek() { return S.peeked || (S.peeked = S.input()); };
|
||
|
|
||
|
function next() {
|
||
|
S.prev = S.token;
|
||
|
if (S.peeked) {
|
||
|
S.token = S.peeked;
|
||
|
S.peeked = null;
|
||
|
} else {
|
||
|
S.token = S.input();
|
||
|
}
|
||
|
S.in_directives = S.in_directives && (
|
||
|
S.token.type == "string" || is("punc", ";")
|
||
|
);
|
||
|
return S.token;
|
||
|
};
|
||
|
|
||
|
function prev() {
|
||
|
return S.prev;
|
||
|
};
|
||
|
|
||
|
function croak(msg, line, col, pos) {
|
||
|
var ctx = S.input.context();
|
||
|
js_error(msg,
|
||
|
ctx.filename,
|
||
|
line != null ? line : ctx.tokline,
|
||
|
col != null ? col : ctx.tokcol,
|
||
|
pos != null ? pos : ctx.tokpos);
|
||
|
};
|
||
|
|
||
|
function token_error(token, msg) {
|
||
|
croak(msg, token.line, token.col);
|
||
|
};
|
||
|
|
||
|
function unexpected(token) {
|
||
|
if (token == null)
|
||
|
token = S.token;
|
||
|
token_error(token, "Unexpected token: " + token.type + " (" + token.value + ")");
|
||
|
};
|
||
|
|
||
|
function expect_token(type, val) {
|
||
|
if (is(type, val)) {
|
||
|
return next();
|
||
|
}
|
||
|
token_error(S.token, "Unexpected token " + S.token.type + " «" + S.token.value + "»" + ", expected " + type + " «" + val + "»");
|
||
|
};
|
||
|
|
||
|
function expect(punc) { return expect_token("punc", punc); };
|
||
|
|
||
|
function has_newline_before(token) {
|
||
|
return token.nlb || !all(token.comments_before, function(comment) {
|
||
|
return !comment.nlb;
|
||
|
});
|
||
|
}
|
||
|
|
||
|
function can_insert_semicolon() {
|
||
|
return !options.strict
|
||
|
&& (is("eof") || is("punc", "}") || has_newline_before(S.token));
|
||
|
};
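// For illustration: unless options.strict is set, a statement may end without
// an explicit ";" when the next token is EOF, a "}", or is preceded by a
// newline (directly or via a comment containing one) -- e.g. "return\nx"
// parses as "return; x;".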
|
||
|
|
||
|
function is_in_generator() {
|
||
|
return S.in_generator === S.in_function;
|
||
|
}
|
||
|
|
||
|
function is_in_async() {
|
||
|
return S.in_async === S.in_function;
|
||
|
}
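// For illustration: S.in_function counts lexical function nesting, and
// entering a generator/async function records that depth in S.in_generator /
// S.in_async. The equality checks above are therefore true only while parsing
// the body of the innermost function when that function itself is a generator
// (or async) -- a plain function nested inside a generator is not "in" it.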
|
||
|
|
||
|
function semicolon(optional) {
|
||
|
if (is("punc", ";")) next();
|
||
|
else if (!optional && !can_insert_semicolon()) unexpected();
|
||
|
};
|
||
|
|
||
|
function parenthesised() {
|
||
|
expect("(");
|
||
|
var exp = expression(true);
|
||
|
expect(")");
|
||
|
return exp;
|
||
|
};
|
||
|
|
||
|
function embed_tokens(parser) {
|
||
|
return function() {
|
||
|
var start = S.token;
|
||
|
var expr = parser.apply(null, arguments);
|
||
|
var end = prev();
|
||
|
expr.start = start;
|
||
|
expr.end = end;
|
||
|
return expr;
|
||
|
};
|
||
|
};
|
||
|
|
||
|
function handle_regexp() {
|
||
|
if (is("operator", "/") || is("operator", "/=")) {
|
||
|
S.peeked = null;
|
||
|
S.token = S.input(S.token.value.substr(1)); // force regexp
|
||
|
}
|
||
|
};
|
||
|
|
||
|
var statement = embed_tokens(function(is_export_default) {
|
||
|
handle_regexp();
|
||
|
switch (S.token.type) {
|
||
|
case "string":
|
||
|
if (S.in_directives) {
|
||
|
var token = peek();
|
||
|
if (S.token.raw.indexOf("\\") == -1
|
||
|
&& (is_token(token, "punc", ";")
|
||
|
|| is_token(token, "punc", "}")
|
||
|
|| has_newline_before(token)
|
||
|
|| is_token(token, "eof"))) {
|
||
|
S.input.add_directive(S.token.value);
|
||
|
} else {
|
||
|
S.in_directives = false;
|
||
|
}
|
||
|
}
|
||
|
var dir = S.in_directives, stat = simple_statement();
|
||
|
return dir ? new AST_Directive(stat.body) : stat;
|
||
|
case "template_head":
|
||
|
case "num":
|
||
|
case "regexp":
|
||
|
case "operator":
|
||
|
case "atom":
|
||
|
return simple_statement();
|
||
|
|
||
|
case "name":
|
||
|
if (S.token.value == "async" && is_token(peek(), "keyword", "function")) {
|
||
|
next();
|
||
|
next();
|
||
|
return function_(AST_Defun, false, true, is_export_default);
|
||
|
}
|
||
|
if (S.token.value == "import" && !is_token(peek(), "punc", "(")) {
|
||
|
next();
|
||
|
var node = import_();
|
||
|
semicolon();
|
||
|
return node;
|
||
|
}
|
||
|
return is_token(peek(), "punc", ":")
|
||
|
? labeled_statement()
|
||
|
: simple_statement();
|
||
|
|
||
|
case "punc":
|
||
|
switch (S.token.value) {
|
||
|
case "{":
|
||
|
return new AST_BlockStatement({
|
||
|
start : S.token,
|
||
|
body : block_(),
|
||
|
end : prev()
|
||
|
});
|
||
|
case "[":
|
||
|
case "(":
|
||
|
return simple_statement();
|
||
|
case ";":
|
||
|
S.in_directives = false;
|
||
|
next();
|
||
|
return new AST_EmptyStatement();
|
||
|
default:
|
||
|
unexpected();
|
||
|
}
|
||
|
|
||
|
case "keyword":
|
||
|
switch (S.token.value) {
|
||
|
case "break":
|
||
|
next();
|
||
|
return break_cont(AST_Break);
|
||
|
|
||
|
case "continue":
|
||
|
next();
|
||
|
return break_cont(AST_Continue);
|
||
|
|
||
|
case "debugger":
|
||
|
next();
|
||
|
semicolon();
|
||
|
return new AST_Debugger();
|
||
|
|
||
|
case "do":
|
||
|
next();
|
||
|
var body = in_loop(statement);
|
||
|
expect_token("keyword", "while");
|
||
|
var condition = parenthesised();
|
||
|
semicolon(true);
|
||
|
return new AST_Do({
|
||
|
body : body,
|
||
|
condition : condition
|
||
|
});
|
||
|
|
||
|
case "while":
|
||
|
next();
|
||
|
return new AST_While({
|
||
|
condition : parenthesised(),
|
||
|
body : in_loop(statement)
|
||
|
});
|
||
|
|
||
|
case "for":
|
||
|
next();
|
||
|
return for_();
|
||
|
|
||
|
case "class":
|
||
|
next();
|
||
|
return class_(AST_DefClass);
|
||
|
|
||
|
case "function":
|
||
|
next();
|
||
|
return function_(AST_Defun, false, false, is_export_default);
|
||
|
|
||
|
case "if":
|
||
|
next();
|
||
|
return if_();
|
||
|
|
||
|
case "return":
|
||
|
if (S.in_function == 0 && !options.bare_returns)
|
||
|
croak("'return' outside of function");
|
||
|
next();
|
||
|
var value = null;
|
||
|
if (is("punc", ";")) {
|
||
|
next();
|
||
|
} else if (!can_insert_semicolon()) {
|
||
|
value = expression(true);
|
||
|
semicolon();
|
||
|
}
|
||
|
return new AST_Return({
|
||
|
value: value
|
||
|
});
|
||
|
|
||
|
case "switch":
|
||
|
next();
|
||
|
return new AST_Switch({
|
||
|
expression : parenthesised(),
|
||
|
body : in_loop(switch_body_)
|
||
|
});
|
||
|
|
||
|
case "throw":
|
||
|
next();
|
||
|
if (has_newline_before(S.token))
|
||
|
croak("Illegal newline after 'throw'");
|
||
|
var value = expression(true);
|
||
|
semicolon();
|
||
|
return new AST_Throw({
|
||
|
value: value
|
||
|
});
|
||
|
|
||
|
case "try":
|
||
|
next();
|
||
|
return try_();
|
||
|
|
||
|
case "var":
|
||
|
next();
|
||
|
var node = var_();
|
||
|
semicolon();
|
||
|
return node;
|
||
|
|
||
|
case "let":
|
||
|
next();
|
||
|
var node = let_();
|
||
|
semicolon();
|
||
|
return node;
|
||
|
|
||
|
case "const":
|
||
|
next();
|
||
|
var node = const_();
|
||
|
semicolon();
|
||
|
return node;
|
||
|
|
||
|
case "with":
|
||
|
if (S.input.has_directive("use strict")) {
|
||
|
croak("Strict mode may not include a with statement");
|
||
|
}
|
||
|
next();
|
||
|
return new AST_With({
|
||
|
expression : parenthesised(),
|
||
|
body : statement()
|
||
|
});
|
||
|
|
||
|
case "export":
|
||
|
if (!is_token(peek(), "punc", "(")) {
|
||
|
next();
|
||
|
return export_();
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
unexpected();
|
||
|
});
|
||
|
|
||
|
function labeled_statement() {
|
||
|
var label = as_symbol(AST_Label);
|
||
|
if (label.name === "await" && is_in_async()) {
|
||
|
token_error(S.prev, "await cannot be used as a label inside an async function");
|
||
|
}
|
||
|
if (find_if(function(l){ return l.name == label.name }, S.labels)) {
|
||
|
// ECMA-262, 12.12: An ECMAScript program is considered
|
||
|
// syntactically incorrect if it contains a
|
||
|
// LabelledStatement that is enclosed by a
|
||
|
// LabelledStatement with the same Identifier as label.
|
||
|
croak("Label " + label.name + " defined twice");
|
||
|
}
|
||
|
expect(":");
|
||
|
S.labels.push(label);
|
||
|
var stat = statement();
|
||
|
S.labels.pop();
|
||
|
if (!(stat instanceof AST_IterationStatement)) {
|
||
|
// check for `continue` that refers to this label.
|
||
|
// those should be reported as syntax errors.
|
||
|
// https://github.com/mishoo/UglifyJS2/issues/287
|
||
|
label.references.forEach(function(ref){
|
||
|
if (ref instanceof AST_Continue) {
|
||
|
ref = ref.label.start;
|
||
|
croak("Continue label `" + label.name + "` refers to non-IterationStatement.",
|
||
|
ref.line, ref.col, ref.pos);
|
||
|
}
|
||
|
});
|
||
|
}
|
||
|
return new AST_LabeledStatement({ body: stat, label: label });
|
||
|
};
|
||
|
|
||
|
function simple_statement(tmp) {
|
||
|
return new AST_SimpleStatement({ body: (tmp = expression(true), semicolon(), tmp) });
|
||
|
};
|
||
|
|
||
|
function break_cont(type) {
|
||
|
var label = null, ldef;
|
||
|
if (!can_insert_semicolon()) {
|
||
|
label = as_symbol(AST_LabelRef, true);
|
||
|
}
|
||
|
if (label != null) {
|
||
|
ldef = find_if(function(l){ return l.name == label.name }, S.labels);
|
||
|
if (!ldef)
|
||
|
croak("Undefined label " + label.name);
|
||
|
label.thedef = ldef;
|
||
|
}
|
||
|
else if (S.in_loop == 0)
|
||
|
croak(type.TYPE + " not inside a loop or switch");
|
||
|
semicolon();
|
||
|
var stat = new type({ label: label });
|
||
|
if (ldef) ldef.references.push(stat);
|
||
|
return stat;
|
||
|
};
|
||
|
|
||
|
function for_() {
|
||
|
expect("(");
|
||
|
var init = null;
|
||
|
if (!is("punc", ";")) {
|
||
|
init =
|
||
|
is("keyword", "var") ? (next(), var_(true)) :
|
||
|
is("keyword", "let") ? (next(), let_(true)) :
|
||
|
is("keyword", "const") ? (next(), const_(true)) :
|
||
|
expression(true, true);
|
||
|
var is_in = is("operator", "in");
|
||
|
var is_of = is("name", "of");
|
||
|
if (is_in || is_of) {
|
||
|
if (init instanceof AST_Definitions) {
|
||
|
if (init.definitions.length > 1)
|
||
|
croak("Only one variable declaration allowed in for..in loop", init.start.line, init.start.col, init.start.pos);
|
||
|
} else if (!(is_assignable(init) || (init = to_destructuring(init)) instanceof AST_Destructuring)) {
|
||
|
croak("Invalid left-hand side in for..in loop", init.start.line, init.start.col, init.start.pos);
|
||
|
}
|
||
|
next();
|
||
|
if (is_in) {
|
||
|
return for_in(init);
|
||
|
} else {
|
||
|
return for_of(init);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
return regular_for(init);
|
||
|
};
|
||
|
|
||
|
function regular_for(init) {
|
||
|
expect(";");
|
||
|
var test = is("punc", ";") ? null : expression(true);
|
||
|
expect(";");
|
||
|
var step = is("punc", ")") ? null : expression(true);
|
||
|
expect(")");
|
||
|
return new AST_For({
|
||
|
init : init,
|
||
|
condition : test,
|
||
|
step : step,
|
||
|
body : in_loop(statement)
|
||
|
});
|
||
|
};
|
||
|
|
||
|
function for_of(init) {
|
||
|
var lhs = init instanceof AST_Definitions ? init.definitions[0].name : null;
|
||
|
var obj = expression(true);
|
||
|
expect(")");
|
||
|
return new AST_ForOf({
|
||
|
init : init,
|
||
|
name : lhs,
|
||
|
object : obj,
|
||
|
body : in_loop(statement)
|
||
|
});
|
||
|
};
|
||
|
|
||
|
function for_in(init) {
|
||
|
var obj = expression(true);
|
||
|
expect(")");
|
||
|
return new AST_ForIn({
|
||
|
init : init,
|
||
|
object : obj,
|
||
|
body : in_loop(statement)
|
||
|
});
|
||
|
};
|
||
|
|
||
|
var arrow_function = function(start, argnames, is_async) {
|
||
|
if (has_newline_before(S.token)) {
|
||
|
croak("Unexpected newline before arrow (=>)");
|
||
|
}
|
||
|
|
||
|
expect_token("arrow", "=>");
|
||
|
|
||
|
var body = _function_body(is("punc", "{"), false, is_async);
|
||
|
|
||
|
return new AST_Arrow({
|
||
|
start : start,
|
||
|
end : body.end,
|
||
|
async : is_async,
|
||
|
argnames : argnames,
|
||
|
body : body
|
||
|
});
|
||
|
};
|
||
|
|
||
|
var function_ = function(ctor, is_generator_property, is_async, is_export_default) {
|
||
|
if (is_generator_property && is_async) croak("generators cannot be async");
|
||
|
var start = S.token;
|
||
|
|
||
|
var in_statement = ctor === AST_Defun;
|
||
|
var is_generator = is("operator", "*");
|
||
|
if (is_generator) {
|
||
|
next();
|
||
|
}
|
||
|
|
||
|
var name = is("name") ? as_symbol(in_statement ? AST_SymbolDefun : AST_SymbolLambda) : null;
|
||
|
if (in_statement && !name) {
|
||
|
if (is_export_default) {
|
||
|
ctor = AST_Function;
|
||
|
} else {
|
||
|
unexpected();
|
||
|
}
|
||
|
}
|
||
|
|
||
|
if (name && ctor !== AST_Accessor && !(name instanceof AST_SymbolDeclaration))
|
||
|
unexpected(prev());
|
||
|
|
||
|
var args = [];
|
||
|
var body = _function_body(true, is_generator || is_generator_property, is_async, name, args);
|
||
|
return new ctor({
|
||
|
start : args.start,
|
||
|
end : body.end,
|
||
|
is_generator: is_generator,
|
||
|
async : is_async,
|
||
|
name : name,
|
||
|
argnames: args,
|
||
|
body : body
|
||
|
});
|
||
|
};
|
||
|
|
||
|
function track_used_binding_identifiers(is_parameter, strict) {
|
||
|
var parameters = {};
|
||
|
var duplicate = false;
|
||
|
var default_assignment = false;
|
||
|
var spread = false;
|
||
|
var strict_mode = !!strict;
|
||
|
var tracker = {
|
||
|
add_parameter: function(token) {
|
||
|
if (parameters["$" + token.value] !== undefined) {
|
||
|
if (duplicate === false) {
|
||
|
duplicate = token;
|
||
|
}
|
||
|
tracker.check_strict();
|
||
|
} else {
|
||
|
parameters["$" + token.value] = true;
|
||
|
if (is_parameter) {
|
||
|
switch (token.value) {
|
||
|
case "arguments":
|
||
|
case "eval":
|
||
|
case "yield":
|
||
|
if (strict_mode) {
|
||
|
token_error(token, "Unexpected " + token.value + " identifier as parameter inside strict mode");
|
||
|
}
|
||
|
break;
|
||
|
default:
|
||
|
if (RESERVED_WORDS(token.value)) {
|
||
|
unexpected();
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
},
|
||
|
mark_default_assignment: function(token) {
|
||
|
if (default_assignment === false) {
|
||
|
default_assignment = token;
|
||
|
}
|
||
|
},
|
||
|
mark_spread: function(token) {
|
||
|
if (spread === false) {
|
||
|
spread = token;
|
||
|
}
|
||
|
},
|
||
|
mark_strict_mode: function() {
|
||
|
strict_mode = true;
|
||
|
},
|
||
|
is_strict: function() {
|
||
|
return default_assignment !== false || spread !== false || strict_mode
|
||
|
},
|
||
|
check_strict: function() {
|
||
|
if (tracker.is_strict() && duplicate !== false) {
|
||
|
token_error(duplicate, "Parameter " + duplicate.value + " was used already");
|
||
|
}
|
||
|
}
|
||
|
};
|
||
|
|
||
|
return tracker;
|
||
|
}
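// For illustration: duplicate parameter names are tolerated in sloppy mode,
// but the tracker reports them as soon as the parameter list is "strict" --
// i.e. under a "use strict" directive, or once it uses default values or a
// rest element. So `function f(a, a) {}` is accepted, while
// `function f(a, a = 1) {}` raises "Parameter a was used already".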
|
||
|
|
||
|
function parameters(params) {
|
||
|
var start = S.token;
|
||
|
var used_parameters = track_used_binding_identifiers(true, S.input.has_directive("use strict"));
|
||
|
|
||
|
expect("(");
|
||
|
|
||
|
while (!is("punc", ")")) {
|
||
|
var param = parameter(used_parameters);
|
||
|
params.push(param);
|
||
|
|
||
|
if (!is("punc", ")")) {
|
||
|
expect(",");
|
||
|
if (is("punc", ")") && options.ecma < 8) unexpected();
|
||
|
}
|
||
|
|
||
|
if (param instanceof AST_Expansion) {
|
||
|
break;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
next();
|
||
|
}
|
||
|
|
||
|
function parameter(used_parameters, symbol_type) {
|
||
|
var param;
|
||
|
var expand = false;
|
||
|
if (used_parameters === undefined) {
|
||
|
used_parameters = track_used_binding_identifiers(true, S.input.has_directive("use strict"));
|
||
|
}
|
||
|
if (is("expand", "...")) {
|
||
|
expand = S.token;
|
||
|
used_parameters.mark_spread(S.token);
|
||
|
next();
|
||
|
}
|
||
|
param = binding_element(used_parameters, symbol_type);
|
||
|
|
||
|
if (is("operator", "=") && expand === false) {
|
||
|
used_parameters.mark_default_assignment(S.token);
|
||
|
next();
|
||
|
param = new AST_DefaultAssign({
|
||
|
start: param.start,
|
||
|
left: param,
|
||
|
operator: "=",
|
||
|
right: expression(false),
|
||
|
end: S.token
|
||
|
});
|
||
|
}
|
||
|
|
||
|
if (expand !== false) {
|
||
|
if (!is("punc", ")")) {
|
||
|
unexpected();
|
||
|
}
|
||
|
param = new AST_Expansion({
|
||
|
start: expand,
|
||
|
expression: param,
|
||
|
end: expand
|
||
|
});
|
||
|
}
|
||
|
used_parameters.check_strict();
|
||
|
|
||
|
return param;
|
||
|
}
|
||
|
|
||
|
function binding_element(used_parameters, symbol_type) {
|
||
|
var elements = [];
|
||
|
var first = true;
|
||
|
var is_expand = false;
|
||
|
var expand_token;
|
||
|
var first_token = S.token;
|
||
|
if (used_parameters === undefined) {
|
||
|
used_parameters = track_used_binding_identifiers(false, S.input.has_directive("use strict"));
|
||
|
}
|
||
|
symbol_type = symbol_type === undefined ? AST_SymbolFunarg : symbol_type;
|
||
|
if (is("punc", "[")) {
|
||
|
next();
|
||
|
while (!is("punc", "]")) {
|
||
|
if (first) {
|
||
|
first = false;
|
||
|
} else {
|
||
|
expect(",");
|
||
|
}
|
||
|
|
||
|
if (is("expand", "...")) {
|
||
|
is_expand = true;
|
||
|
expand_token = S.token;
|
||
|
used_parameters.mark_spread(S.token);
|
||
|
next();
|
||
|
}
|
||
|
if (is("punc")) {
|
||
|
switch (S.token.value) {
|
||
|
case ",":
|
||
|
elements.push(new AST_Hole({
|
||
|
start: S.token,
|
||
|
end: S.token
|
||
|
}));
|
||
|
continue;
|
||
|
case "]": // Trailing comma after last element
|
||
|
break;
|
||
|
case "[":
|
||
|
case "{":
|
||
|
elements.push(binding_element(used_parameters, symbol_type));
|
||
|
break;
|
||
|
default:
|
||
|
unexpected();
|
||
|
}
|
||
|
} else if (is("name")) {
|
||
|
used_parameters.add_parameter(S.token);
|
||
|
elements.push(as_symbol(symbol_type));
|
||
|
} else {
|
||
|
croak("Invalid function parameter");
|
||
|
}
|
||
|
if (is("operator", "=") && is_expand === false) {
|
||
|
used_parameters.mark_default_assignment(S.token);
|
||
|
next();
|
||
|
elements[elements.length - 1] = new AST_DefaultAssign({
|
||
|
start: elements[elements.length - 1].start,
|
||
|
left: elements[elements.length - 1],
|
||
|
operator: "=",
|
||
|
right: expression(false),
|
||
|
end: S.token
|
||
|
});
|
||
|
}
|
||
|
if (is_expand) {
|
||
|
if (!is("punc", "]")) {
|
||
|
croak("Rest element must be last element");
|
||
|
}
|
||
|
elements[elements.length - 1] = new AST_Expansion({
|
||
|
start: expand_token,
|
||
|
expression: elements[elements.length - 1],
|
||
|
end: expand_token
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
expect("]");
|
||
|
used_parameters.check_strict();
|
||
|
return new AST_Destructuring({
|
||
|
start: first_token,
|
||
|
names: elements,
|
||
|
is_array: true,
|
||
|
end: prev()
|
||
|
});
|
||
|
} else if (is("punc", "{")) {
|
||
|
next();
|
||
|
while (!is("punc", "}")) {
|
||
|
if (first) {
|
||
|
first = false;
|
||
|
} else {
|
||
|
expect(",");
|
||
|
}
|
||
|
if (is("expand", "...")) {
|
||
|
is_expand = true;
|
||
|
expand_token = S.token;
|
||
|
used_parameters.mark_spread(S.token);
|
||
|
next();
|
||
|
}
|
||
|
if (is("name") && (is_token(peek(), "punc") || is_token(peek(), "operator")) && [",", "}", "="].indexOf(peek().value) !== -1) {
|
||
|
used_parameters.add_parameter(S.token);
|
||
|
var start = prev();
|
||
|
var value = as_symbol(symbol_type);
|
||
|
if (is_expand) {
|
||
|
elements.push(new AST_Expansion({
|
||
|
start: expand_token,
|
||
|
expression: value,
|
||
|
end: value.end,
|
||
|
}));
|
||
|
} else {
|
||
|
elements.push(new AST_ObjectKeyVal({
|
||
|
start: start,
|
||
|
key: value.name,
|
||
|
value: value,
|
||
|
end: value.end,
|
||
|
}));
|
||
|
}
|
||
|
} else if (is("punc", "}")) {
|
||
|
continue; // Allow trailing hole
|
||
|
} else {
|
||
|
var property_token = S.token;
|
||
|
var property = as_property_name();
|
||
|
if (property === null) {
|
||
|
unexpected(prev());
|
||
|
} else if (prev().type === "name" && !is("punc", ":")) {
|
||
|
elements.push(new AST_ObjectKeyVal({
|
||
|
start: prev(),
|
||
|
key: property,
|
||
|
value: new symbol_type({
|
||
|
start: prev(),
|
||
|
name: property,
|
||
|
end: prev()
|
||
|
}),
|
||
|
end: prev()
|
||
|
}));
|
||
|
} else {
|
||
|
expect(":");
|
||
|
elements.push(new AST_ObjectKeyVal({
|
||
|
start: property_token,
|
||
|
quote: property_token.quote,
|
||
|
key: property,
|
||
|
value: binding_element(used_parameters, symbol_type),
|
||
|
end: prev()
|
||
|
}));
|
||
|
}
|
||
|
}
|
||
|
if (is_expand) {
|
||
|
if (!is("punc", "}")) {
|
||
|
croak("Rest element must be last element");
|
||
|
}
|
||
|
}
|
||
|
else if (is("operator", "=")) {
|
||
|
used_parameters.mark_default_assignment(S.token);
|
||
|
next();
|
||
|
elements[elements.length - 1].value = new AST_DefaultAssign({
|
||
|
start: elements[elements.length - 1].value.start,
|
||
|
left: elements[elements.length - 1].value,
|
||
|
operator: "=",
|
||
|
right: expression(false),
|
||
|
end: S.token
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
expect("}");
|
||
|
used_parameters.check_strict();
|
||
|
return new AST_Destructuring({
|
||
|
start: first_token,
|
||
|
names: elements,
|
||
|
is_array: false,
|
||
|
end: prev()
|
||
|
});
|
||
|
} else if (is("name")) {
|
||
|
used_parameters.add_parameter(S.token);
|
||
|
return as_symbol(symbol_type);
|
||
|
} else {
|
||
|
croak("Invalid function parameter");
|
||
|
}
|
||
|
}
|
||
|
|
||
|
function params_or_seq_(allow_arrows, maybe_sequence) {
|
||
|
var spread_token;
|
||
|
var invalid_sequence;
|
||
|
var trailing_comma;
|
||
|
var a = [];
|
||
|
expect("(");
|
||
|
while (!is("punc", ")")) {
|
||
|
if (spread_token) unexpected(spread_token);
|
||
|
if (is("expand", "...")) {
|
||
|
spread_token = S.token;
|
||
|
if (maybe_sequence) invalid_sequence = S.token;
|
||
|
next();
|
||
|
a.push(new AST_Expansion({
|
||
|
start: prev(),
|
||
|
expression: expression(),
|
||
|
end: S.token,
|
||
|
}));
|
||
|
} else {
|
||
|
a.push(expression());
|
||
|
}
|
||
|
if (!is("punc", ")")) {
|
||
|
expect(",");
|
||
|
if (is("punc", ")")) {
|
||
|
if (options.ecma < 8) unexpected();
|
||
|
trailing_comma = prev();
|
||
|
if (maybe_sequence) invalid_sequence = trailing_comma;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
expect(")");
|
||
|
if (allow_arrows && is("arrow", "=>")) {
|
||
|
if (spread_token && trailing_comma) unexpected(trailing_comma);
|
||
|
} else if (invalid_sequence) {
|
||
|
unexpected(invalid_sequence);
|
||
|
}
|
||
|
return a;
|
||
|
}
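// For illustration: at "(", the parser cannot yet tell an arrow parameter
// list from a parenthesised sequence expression. params_or_seq_() parses
// expressions optimistically and records constructs that are only legal in
// one of the two readings (a rest element `...x`, a trailing comma); if "=>"
// follows they were parameters, otherwise the recorded token is reported via
// unexpected().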
|
||
|
|
||
|
    function _function_body(block, generator, is_async, name, args) {
        var loop = S.in_loop;
        var labels = S.labels;
        var current_generator = S.in_generator;
        var current_async = S.in_async;
        ++S.in_function;
        if (generator)
            S.in_generator = S.in_function;
        if (is_async)
            S.in_async = S.in_function;
        if (args) parameters(args);
        if (block)
            S.in_directives = true;
        S.in_loop = 0;
        S.labels = [];
        if (block) {
            S.input.push_directives_stack();
            var a = block_();
            if (name) _verify_symbol(name);
            if (args) args.forEach(_verify_symbol);
            S.input.pop_directives_stack();
        } else {
            var a = expression(false);
        }
        --S.in_function;
        S.in_loop = loop;
        S.labels = labels;
        S.in_generator = current_generator;
        S.in_async = current_async;
        return a;
    }

    function _await_expression() {
        // Previous token must be "await" and not be interpreted as an identifier
        if (!is_in_async()) {
            croak("Unexpected await expression outside async function",
                S.prev.line, S.prev.col, S.prev.pos);
        }
        // the await expression is parsed as a unary expression in Babel
        return new AST_Await({
            expression : maybe_unary(true),
        });
    }

    function _yield_expression() {
        // Previous token must be keyword "yield" and must not be interpreted as an identifier
        if (!is_in_generator()) {
            croak("Unexpected yield expression outside generator function",
                S.prev.line, S.prev.col, S.prev.pos);
        }
        var star = false;
        var has_expression = true;

        // Attempt to get expression or star (and then the mandatory expression)
        // behind yield on the same line.
        //
        // If nothing follows on the same line of the yield expression,
        // it should default to the value `undefined` for yield to return.
        // In that case, the `undefined` is stored as `null` in the AST.
        //
        // Note 1: It isn't allowed for yield* to close without an expression
        // Note 2: If there is a nlb (newline before) between yield and star, it is interpreted as
        //         yield <explicit undefined> <inserted automatic semicolon> *
        if (can_insert_semicolon() ||
            (is("punc") && PUNC_AFTER_EXPRESSION(S.token.value))) {
            has_expression = false;

        } else if (is("operator", "*")) {
            star = true;
            next();
        }

        return new AST_Yield({
            is_star : star,
            expression : has_expression ? expression() : null
        });
    }

    function if_() {
        var cond = parenthesised(), body = statement(), belse = null;
        if (is("keyword", "else")) {
            next();
            belse = statement();
        }
        return new AST_If({
            condition : cond,
            body : body,
            alternative : belse
        });
    };

    function block_() {
        expect("{");
        var a = [];
        while (!is("punc", "}")) {
            if (is("eof")) unexpected();
            a.push(statement());
        }
        next();
        return a;
    };

    function switch_body_() {
        expect("{");
        var a = [], cur = null, branch = null, tmp;
        while (!is("punc", "}")) {
            if (is("eof")) unexpected();
            if (is("keyword", "case")) {
                if (branch) branch.end = prev();
                cur = [];
                branch = new AST_Case({
                    start : (tmp = S.token, next(), tmp),
                    expression : expression(true),
                    body : cur
                });
                a.push(branch);
                expect(":");
            }
            else if (is("keyword", "default")) {
                if (branch) branch.end = prev();
                cur = [];
                branch = new AST_Default({
                    start : (tmp = S.token, next(), expect(":"), tmp),
                    body : cur
                });
                a.push(branch);
            }
            else {
                if (!cur) unexpected();
                cur.push(statement());
            }
        }
        if (branch) branch.end = prev();
        next();
        return a;
    };

    function try_() {
        var body = block_(), bcatch = null, bfinally = null;
        if (is("keyword", "catch")) {
            var start = S.token;
            next();
            expect("(");
            var name = parameter(undefined, AST_SymbolCatch);
            expect(")");
            bcatch = new AST_Catch({
                start : start,
                argname : name,
                body : block_(),
                end : prev()
            });
        }
        if (is("keyword", "finally")) {
            var start = S.token;
            next();
            bfinally = new AST_Finally({
                start : start,
                body : block_(),
                end : prev()
            });
        }
        if (!bcatch && !bfinally)
            croak("Missing catch/finally blocks");
        return new AST_Try({
            body : body,
            bcatch : bcatch,
            bfinally : bfinally
        });
    };

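    // Parse a comma-separated list of variable definitions for var/let/const,
    // including destructuring patterns and optional initializers.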
    function vardefs(no_in, kind) {
        var a = [];
        var def;
        for (;;) {
            var sym_type =
                kind === "var" ? AST_SymbolVar :
                kind === "const" ? AST_SymbolConst :
                kind === "let" ? AST_SymbolLet : null;
            if (is("punc", "{") || is("punc", "[")) {
                def = new AST_VarDef({
                    start: S.token,
                    name: binding_element(undefined, sym_type),
                    value: is("operator", "=") ? (expect_token("operator", "="), expression(false, no_in)) : null,
                    end: prev()
                });
            } else {
                def = new AST_VarDef({
                    start : S.token,
                    name : as_symbol(sym_type),
                    value : is("operator", "=")
                        ? (next(), expression(false, no_in))
                        : !no_in && kind === "const"
                            ? croak("Missing initializer in const declaration") : null,
                    end : prev()
                });
                if (def.name.name == "import") croak("Unexpected token: import");
            }
            a.push(def);
            if (!is("punc", ","))
                break;
            next();
        }
        return a;
    };

    var var_ = function(no_in) {
        return new AST_Var({
            start : prev(),
            definitions : vardefs(no_in, "var"),
            end : prev()
        });
    };

    var let_ = function(no_in) {
        return new AST_Let({
            start : prev(),
            definitions : vardefs(no_in, "let"),
            end : prev()
        });
    };

    var const_ = function(no_in) {
        return new AST_Const({
            start : prev(),
            definitions : vardefs(no_in, "const"),
            end : prev()
        });
    };

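    // Parse a `new` expression, including `new.target` and argument-less `new Foo`.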
    var new_ = function(allow_calls) {
        var start = S.token;
        expect_token("operator", "new");
        if (is("punc", ".")) {
            next();
            expect_token("name", "target");
            return subscripts(new AST_NewTarget({
                start : start,
                end : prev()
            }), allow_calls);
        }
        var newexp = expr_atom(false), args;
        if (is("punc", "(")) {
            next();
            args = expr_list(")", options.ecma >= 8);
        } else {
            args = [];
        }
        var call = new AST_New({
            start : start,
            expression : newexp,
            args : args,
            end : prev()
        });
        mark_pure(call);
        return subscripts(call, allow_calls);
    };

    function as_atom_node() {
        var tok = S.token, ret;
        switch (tok.type) {
          case "name":
            ret = _make_symbol(AST_SymbolRef);
            break;
          case "num":
            ret = new AST_Number({ start: tok, end: tok, value: tok.value });
            break;
          case "string":
            ret = new AST_String({
                start : tok,
                end : tok,
                value : tok.value,
                quote : tok.quote
            });
            break;
          case "regexp":
            ret = new AST_RegExp({ start: tok, end: tok, value: tok.value });
            break;
          case "atom":
            switch (tok.value) {
              case "false":
                ret = new AST_False({ start: tok, end: tok });
                break;
              case "true":
                ret = new AST_True({ start: tok, end: tok });
                break;
              case "null":
                ret = new AST_Null({ start: tok, end: tok });
                break;
            }
            break;
        }
        next();
        return ret;
    };

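    // Convert an already-parsed expression (from a parenthesised head) into a
    // function parameter list: object/array literals become destructuring
    // patterns and assignments become default values.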
    function to_fun_args(ex, _, __, default_seen_above) {
        var insert_default = function(ex, default_value) {
            if (default_value) {
                return new AST_DefaultAssign({
                    start: ex.start,
                    left: ex,
                    operator: "=",
                    right: default_value,
                    end: default_value.end
                });
            }
            return ex;
        }
        if (ex instanceof AST_Object) {
            return insert_default(new AST_Destructuring({
                start: ex.start,
                end: ex.end,
                is_array: false,
                names: ex.properties.map(to_fun_args)
            }), default_seen_above);
        } else if (ex instanceof AST_ObjectKeyVal) {
            ex.value = to_fun_args(ex.value, 0, [ex.key]);
            return insert_default(ex, default_seen_above);
        } else if (ex instanceof AST_Hole) {
            return ex;
        } else if (ex instanceof AST_Destructuring) {
            ex.names = ex.names.map(to_fun_args);
            return insert_default(ex, default_seen_above);
        } else if (ex instanceof AST_SymbolRef) {
            return insert_default(new AST_SymbolFunarg({
                name: ex.name,
                start: ex.start,
                end: ex.end
            }), default_seen_above);
        } else if (ex instanceof AST_Expansion) {
            ex.expression = to_fun_args(ex.expression);
            return insert_default(ex, default_seen_above);
        } else if (ex instanceof AST_Array) {
            return insert_default(new AST_Destructuring({
                start: ex.start,
                end: ex.end,
                is_array: true,
                names: ex.elements.map(to_fun_args)
            }), default_seen_above);
        } else if (ex instanceof AST_Assign) {
            return insert_default(to_fun_args(ex.left, undefined, undefined, ex.right), default_seen_above);
        } else if (ex instanceof AST_DefaultAssign) {
            ex.left = to_fun_args(ex.left, 0, [ex.left]);
            return ex;
        } else {
            croak("Invalid function parameter", ex.start.line, ex.start.col);
        }
    }

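    // Parse a primary expression: parenthesised expressions / arrow function
    // heads, literals, function and class expressions, templates and async forms.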
    var expr_atom = function(allow_calls, allow_arrows) {
        if (is("operator", "new")) {
            return new_(allow_calls);
        }
        var start = S.token;
        var async = is("name", "async") && as_atom_node();
        if (is("punc")) {
            switch (S.token.value) {
              case "(":
                if (async && !allow_calls) break;
                var exprs = params_or_seq_(allow_arrows, !async);
                if (allow_arrows && is("arrow", "=>")) {
                    return arrow_function(start, exprs.map(to_fun_args), !!async);
                }
                var ex = async ? new AST_Call({
                    expression: async,
                    args: exprs
                }) : exprs.length == 1 ? exprs[0] : new AST_Sequence({
                    expressions: exprs
                });
                if (ex.start) {
                    var len = start.comments_before.length;
                    [].unshift.apply(ex.start.comments_before, start.comments_before);
                    start.comments_before = ex.start.comments_before;
                    start.comments_before_length = len;
                    if (len == 0 && start.comments_before.length > 0) {
                        var comment = start.comments_before[0];
                        if (!comment.nlb) {
                            comment.nlb = start.nlb;
                            start.nlb = false;
                        }
                    }
                    start.comments_after = ex.start.comments_after;
                }
                ex.start = start;
                var end = prev();
                if (ex.end) {
                    end.comments_before = ex.end.comments_before;
                    [].push.apply(ex.end.comments_after, end.comments_after);
                    end.comments_after = ex.end.comments_after;
                }
                ex.end = end;
                if (ex instanceof AST_Call) mark_pure(ex);
                return subscripts(ex, allow_calls);
              case "[":
                return subscripts(array_(), allow_calls);
              case "{":
                return subscripts(object_or_destructuring_(), allow_calls);
            }
            if (!async) unexpected();
        }
        if (allow_arrows && is("name") && is_token(peek(), "arrow")) {
            var param = new AST_SymbolFunarg({
                name: S.token.value,
                start: start,
                end: start,
            });
            next();
            return arrow_function(start, [param], !!async);
        }
        if (is("keyword", "function")) {
            next();
            var func = function_(AST_Function, false, !!async);
            func.start = start;
            func.end = prev();
            return subscripts(func, allow_calls);
        }
        if (async) return subscripts(async, allow_calls);
        if (is("keyword", "class")) {
            next();
            var cls = class_(AST_ClassExpression);
            cls.start = start;
            cls.end = prev();
            return subscripts(cls, allow_calls);
        }
        if (is("template_head")) {
            return subscripts(template_string(), allow_calls);
        }
        if (ATOMIC_START_TOKEN(S.token.type)) {
            return subscripts(as_atom_node(), allow_calls);
        }
        unexpected();
    };

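    // Parse a template literal into alternating AST_TemplateSegment nodes and
    // substitution expressions.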
    function template_string() {
        var segments = [], start = S.token;

        segments.push(new AST_TemplateSegment({
            start: S.token,
            raw: S.token.raw,
            value: S.token.value,
            end: S.token
        }));
        while (S.token.end === false) {
            next();
            handle_regexp();
            segments.push(expression(true));

            if (!is_token("template_substitution")) {
                unexpected();
            }

            segments.push(new AST_TemplateSegment({
                start: S.token,
                raw: S.token.raw,
                value: S.token.value,
                end: S.token
            }));
        }
        next();

        return new AST_TemplateString({
            start: start,
            segments: segments,
            end: S.token
        });
    }

    function expr_list(closing, allow_trailing_comma, allow_empty) {
        var first = true, a = [];
        while (!is("punc", closing)) {
            if (first) first = false; else expect(",");
            if (allow_trailing_comma && is("punc", closing)) break;
            if (is("punc", ",") && allow_empty) {
                a.push(new AST_Hole({ start: S.token, end: S.token }));
            } else if (is("expand", "...")) {
                next();
                a.push(new AST_Expansion({ start: prev(), expression: expression(), end: S.token }));
            } else {
                a.push(expression(false));
            }
        }
        next();
        return a;
    };

    var array_ = embed_tokens(function() {
        expect("[");
        return new AST_Array({
            elements: expr_list("]", !options.strict, true)
        });
    });

    var create_accessor = embed_tokens(function(is_generator, is_async) {
        return function_(AST_Accessor, is_generator, is_async);
    });

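    // Parse an object literal; the result may later be converted into a
    // destructuring pattern or an arrow parameter list by the caller.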
    var object_or_destructuring_ = embed_tokens(function object_or_destructuring_() {
        var start = S.token, first = true, a = [];
        expect("{");
        while (!is("punc", "}")) {
            if (first) first = false; else expect(",");
            if (!options.strict && is("punc", "}"))
                // allow trailing comma
                break;

            start = S.token;
            if (start.type == "expand") {
                next();
                a.push(new AST_Expansion({
                    start: start,
                    expression: expression(false),
                    end: prev(),
                }));
                continue;
            }

            var name = as_property_name();
            var value;

            // Check property and fetch value
            if (!is("punc", ":")) {
                var concise = concise_method_or_getset(name, start);
                if (concise) {
                    a.push(concise);
                    continue;
                }

                value = new AST_SymbolRef({
                    start: prev(),
                    name: name,
                    end: prev()
                });
            } else if (name === null) {
                unexpected(prev());
            } else {
                next(); // `:` - see first condition
                value = expression(false);
            }

            // Check for default value and alter value accordingly if necessary
            if (is("operator", "=")) {
                next();
                value = new AST_Assign({
                    start: start,
                    left: value,
                    operator: "=",
                    right: expression(false),
                    end: prev()
                });
            }

            // Create property
            a.push(new AST_ObjectKeyVal({
                start: start,
                quote: start.quote,
                key: name instanceof AST_Node ? name : "" + name,
                value: value,
                end: prev()
            }));
        }
        next();
        return new AST_Object({ properties: a });
    });

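    // Parse a class declaration or expression; class bodies are implicitly
    // strict, so a "use strict" directive is pushed for the duration.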
    function class_(KindOfClass) {
        var start, method, class_name, extends_, a = [];

        S.input.push_directives_stack(); // Push directive stack, but not scope stack
        S.input.add_directive("use strict");

        if (S.token.type == "name" && S.token.value != "extends") {
            class_name = as_symbol(KindOfClass === AST_DefClass ? AST_SymbolDefClass : AST_SymbolClass);
        }

        if (KindOfClass === AST_DefClass && !class_name) {
            unexpected();
        }

        if (S.token.value == "extends") {
            next();
            extends_ = expression(true);
        }

        expect("{");

        if (is("punc", ";")) { next(); } // Leading semicolons are okay in class bodies.
        while (!is("punc", "}")) {
            start = S.token;
            method = concise_method_or_getset(as_property_name(), start, true);
            if (!method) { unexpected(); }
            a.push(method);
            if (is("punc", ";")) { next(); }
        }

        S.input.pop_directives_stack();

        next();

        return new KindOfClass({
            start: start,
            name: class_name,
            extends: extends_,
            properties: a,
            end: prev(),
        });
    }

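    // Parse a concise method, getter or setter (in object literals and class
    // bodies), handling the static/async/generator prefixes.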
    function concise_method_or_getset(name, start, is_class) {
        var get_ast = function(name, token) {
            if (typeof name === "string" || typeof name === "number") {
                return new AST_SymbolMethod({
                    start: token,
                    name: "" + name,
                    end: prev()
                });
            } else if (name === null) {
                unexpected();
            }
            return name;
        }
        var is_async = false;
        var is_static = false;
        var is_generator = false;
        var property_token = start;
        if (is_class && name === "static" && !is("punc", "(")) {
            is_static = true;
            property_token = S.token;
            name = as_property_name();
        }
        if (name === "async" && !is("punc", "(") && !is("punc", ",") && !is("punc", "}")) {
            is_async = true;
            property_token = S.token;
            name = as_property_name();
        }
        if (name === null) {
            is_generator = true;
            property_token = S.token;
            name = as_property_name();
            if (name === null) {
                unexpected();
            }
        }
        if (is("punc", "(")) {
            name = get_ast(name, start);
            var node = new AST_ConciseMethod({
                start : start,
                static : is_static,
                is_generator: is_generator,
                async : is_async,
                key : name,
                quote : name instanceof AST_SymbolMethod ?
                    property_token.quote : undefined,
                value : create_accessor(is_generator, is_async),
                end : prev()
            });
            return node;
        }
        property_token = S.token;
        if (name == "get") {
            if (!is("punc") || is("punc", "[")) {
                name = get_ast(as_property_name(), start);
                return new AST_ObjectGetter({
                    start : start,
                    static: is_static,
                    key : name,
                    quote : name instanceof AST_SymbolMethod ?
                        property_token.quote : undefined,
                    value : create_accessor(),
                    end : prev()
                });
            }
        }
        else if (name == "set") {
            if (!is("punc") || is("punc", "[")) {
                name = get_ast(as_property_name(), start);
                return new AST_ObjectSetter({
                    start : start,
                    static: is_static,
                    key : name,
                    quote : name instanceof AST_SymbolMethod ?
                        property_token.quote : undefined,
                    value : create_accessor(),
                    end : prev()
                });
            }
        }
    }

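    // Parse an import statement (after the "import" keyword has been consumed).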
    function import_() {
        var start = prev();
        var imported_name;
        var imported_names;
        if (is("name")) {
            imported_name = as_symbol(AST_SymbolImport);
        }

        if (is("punc", ",")) {
            next();
        }

        imported_names = map_names(true);

        if (imported_names || imported_name) {
            expect_token("name", "from");
        }
        var mod_str = S.token;
        if (mod_str.type !== "string") {
            unexpected();
        }
        next();
        return new AST_Import({
            start: start,
            imported_name: imported_name,
            imported_names: imported_names,
            module_name: new AST_String({
                start: mod_str,
                value: mod_str.value,
                quote: mod_str.quote,
                end: mod_str,
            }),
            end: S.token,
        });
    }

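    // Parse a single name (optionally aliased with "as") inside an import or
    // export specifier list.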
    function map_name(is_import) {
        function make_symbol(type) {
            return new type({
                name: as_property_name(),
                start: prev(),
                end: prev()
            });
        }

        var foreign_type = is_import ? AST_SymbolImportForeign : AST_SymbolExportForeign;
        var type = is_import ? AST_SymbolImport : AST_SymbolExport;
        var start = S.token;
        var foreign_name;
        var name;

        if (is_import) {
            foreign_name = make_symbol(foreign_type);
        } else {
            name = make_symbol(type);
        }
        if (is("name", "as")) {
            next(); // The "as" word
            if (is_import) {
                name = make_symbol(type);
            } else {
                foreign_name = make_symbol(foreign_type);
            }
        } else if (is_import) {
            name = new type(foreign_name);
        } else {
            foreign_name = new foreign_type(name);
        }

        return new AST_NameMapping({
            start: start,
            foreign_name: foreign_name,
            name: name,
            end: prev(),
        });
    }

    function map_nameAsterisk(is_import, name) {
        var foreign_type = is_import ? AST_SymbolImportForeign : AST_SymbolExportForeign;
        var type = is_import ? AST_SymbolImport : AST_SymbolExport;
        var start = S.token;
        var foreign_name;
        var end = prev();

        name = name || new type({
            name: "*",
            start: start,
            end: end,
        });

        foreign_name = new foreign_type({
            name: "*",
            start: start,
            end: end,
        });

        return new AST_NameMapping({
            start: start,
            foreign_name: foreign_name,
            name: name,
            end: end,
        });
    }

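    // Parse the specifier list of an import/export: either a braced list of
    // names or a namespace ("*", optionally aliased with "as").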
    function map_names(is_import) {
        var names;
        if (is("punc", "{")) {
            next();
            names = [];
            while (!is("punc", "}")) {
                names.push(map_name(is_import));
                if (is("punc", ",")) {
                    next();
                }
            }
            next();
        } else if (is("operator", "*")) {
            var name;
            next();
            if (is_import && is("name", "as")) {
                next(); // The "as" word
                name = as_symbol(AST_SymbolImportForeign);
            }
            names = [map_nameAsterisk(is_import, name)];
        }
        return names;
    }

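    // Parse what follows the "export" keyword: named re-exports, default
    // exports, and exported definitions or expressions.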
    function export_() {
        var start = S.token;
        var is_default;
        var exported_names;

        if (is("keyword", "default")) {
            is_default = true;
            next();
        } else if (exported_names = map_names(false)) {
            if (is("name", "from")) {
                next();

                var mod_str = S.token;
                if (mod_str.type !== "string") {
                    unexpected();
                }
                next();

                return new AST_Export({
                    start: start,
                    is_default: is_default,
                    exported_names: exported_names,
                    module_name: new AST_String({
                        start: mod_str,
                        value: mod_str.value,
                        quote: mod_str.quote,
                        end: mod_str,
                    }),
                    end: prev(),
                });
            } else {
                return new AST_Export({
                    start: start,
                    is_default: is_default,
                    exported_names: exported_names,
                    end: prev(),
                });
            }
        }

        var node;
        var exported_value;
        var exported_definition;
        if (is("punc", "{")
            || is_default
                && (is("keyword", "class") || is("keyword", "function"))
                && is_token(peek(), "punc")) {
            exported_value = expression(false);
            semicolon();
        } else if ((node = statement(is_default)) instanceof AST_Definitions && is_default) {
            unexpected(node.start);
        } else if (node instanceof AST_Definitions || node instanceof AST_Lambda || node instanceof AST_DefClass) {
            exported_definition = node;
        } else if (node instanceof AST_SimpleStatement) {
            exported_value = node.body;
        } else {
            unexpected(node.start);
        }

        return new AST_Export({
            start: start,
            is_default: is_default,
            exported_value: exported_value,
            exported_definition: exported_definition,
            end: prev(),
        });
    }

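    // Parse a property name: computed ([expr]), identifier, string, number,
    // keyword or atom; returns null when a "*" (generator marker) is seen.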
    function as_property_name() {
        var tmp = S.token;
        switch (tmp.type) {
          case "punc":
            if (tmp.value === "[") {
                next();
                var ex = expression(false);
                expect("]");
                return ex;
            } else unexpected(tmp);
          case "operator":
            if (tmp.value === "*") {
                next();
                return null;
            }
            if (["delete", "in", "instanceof", "new", "typeof", "void"].indexOf(tmp.value) === -1) {
                unexpected(tmp);
            }
          case "name":
            if (tmp.value == "yield") {
                if (is_in_generator()) {
                    token_error(tmp, "Yield cannot be used as identifier inside generators");
                } else if (!is_token(peek(), "punc", ":")
                    && !is_token(peek(), "punc", "(")
                    && S.input.has_directive("use strict")) {
                    token_error(tmp, "Unexpected yield identifier inside strict mode");
                }
            }
          case "string":
          case "num":
          case "keyword":
          case "atom":
            next();
            return tmp.value;
          default:
            unexpected(tmp);
        }
    };

    function as_name() {
        var tmp = S.token;
        if (tmp.type != "name") unexpected();
        next();
        return tmp.value;
    };

    function _make_symbol(type) {
        var name = S.token.value;
        return new (name == "this" ? AST_This :
                    name == "super" ? AST_Super :
                    type)({
            name : String(name),
            start : S.token,
            end : S.token
        });
    };

    function _verify_symbol(sym) {
        var name = sym.name;
        if (is_in_generator() && name == "yield") {
            token_error(sym.start, "Yield cannot be used as identifier inside generators");
        }
        if (S.input.has_directive("use strict")) {
            if (name == "yield") {
                token_error(sym.start, "Unexpected yield identifier inside strict mode");
            }
            if (sym instanceof AST_SymbolDeclaration && (name == "arguments" || name == "eval")) {
                token_error(sym.start, "Unexpected " + name + " in strict mode");
            }
        }
    }

    function as_symbol(type, noerror) {
        if (!is("name")) {
            if (!noerror) croak("Name expected");
            return null;
        }
        var sym = _make_symbol(type);
        _verify_symbol(sym);
        next();
        return sym;
    };

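    // Mark a call as side-effect free when a /*@__PURE__*/ or /*#__PURE__*/
    // comment appears immediately before it.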
    function mark_pure(call) {
        var start = call.start;
        var comments = start.comments_before;
        var i = HOP(start, "comments_before_length") ? start.comments_before_length : comments.length;
        while (--i >= 0) {
            var comment = comments[i];
            if (/[@#]__PURE__/.test(comment.value)) {
                call.pure = comment;
                break;
            }
        }
    }

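    // Parse property access (".name", "[expr]"), calls and tagged templates
    // that may follow an already-parsed expression.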
    var subscripts = function(expr, allow_calls) {
        var start = expr.start;
        if (is("punc", ".")) {
            next();
            return subscripts(new AST_Dot({
                start : start,
                expression : expr,
                property : as_name(),
                end : prev()
            }), allow_calls);
        }
        if (is("punc", "[")) {
            next();
            var prop = expression(true);
            expect("]");
            return subscripts(new AST_Sub({
                start : start,
                expression : expr,
                property : prop,
                end : prev()
            }), allow_calls);
        }
        if (allow_calls && is("punc", "(")) {
            next();
            var call = new AST_Call({
                start : start,
                expression : expr,
                args : call_args(),
                end : prev()
            });
            mark_pure(call);
            return subscripts(call, true);
        }
        if (is("template_head")) {
            return subscripts(new AST_PrefixedTemplateString({
                start: start,
                prefix: expr,
                template_string: template_string()
            }), allow_calls);
        }
        return expr;
    };

    var call_args = embed_tokens(function _call_args() {
        var args = [];
        while (!is("punc", ")")) {
            if (is("expand", "...")) {
                next();
                args.push(new AST_Expansion({
                    start: prev(),
                    expression: expression(false)
                }));
            } else {
                args.push(expression(false));
            }
            if (!is("punc", ")")) {
                expect(",");
                if (is("punc", ")") && options.ecma < 8) unexpected();
            }
        }
        next();
        return args;
    });

    var maybe_unary = function(allow_calls, allow_arrows) {
        var start = S.token;
        if (start.type == "name" && start.value == "await") {
            if (is_in_async()) {
                next();
                return _await_expression();
            } else if (S.input.has_directive("use strict")) {
                token_error(S.token, "Unexpected await identifier inside strict mode");
            }
        }
        if (is("operator") && UNARY_PREFIX(start.value)) {
            next();
            handle_regexp();
            var ex = make_unary(AST_UnaryPrefix, start, maybe_unary(allow_calls));
            ex.start = start;
            ex.end = prev();
            return ex;
        }
        var val = expr_atom(allow_calls, allow_arrows);
        while (is("operator") && UNARY_POSTFIX(S.token.value) && !has_newline_before(S.token)) {
            if (val instanceof AST_Arrow) unexpected();
            val = make_unary(AST_UnaryPostfix, S.token, val);
            val.start = start;
            val.end = S.token;
            next();
        }
        return val;
    };

    function make_unary(ctor, token, expr) {
        var op = token.value;
        switch (op) {
          case "++":
          case "--":
            if (!is_assignable(expr))
                croak("Invalid use of " + op + " operator", token.line, token.col, token.pos);
            break;
          case "delete":
            if (expr instanceof AST_SymbolRef && S.input.has_directive("use strict"))
                croak("Calling delete on expression not allowed in strict mode", expr.start.line, expr.start.col, expr.start.pos);
            break;
        }
        return new ctor({ operator: op, expression: expr });
    };

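    // Precedence-climbing parser for binary operators; also rejects an
    // unparenthesised unary operand on the left of "**".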
    var expr_op = function(left, min_prec, no_in) {
        var op = is("operator") ? S.token.value : null;
        if (op == "in" && no_in) op = null;
        if (op == "**" && left instanceof AST_UnaryPrefix
            /* unary token in front not allowed - parenthesis required */
            && !is_token(left.start, "punc", "(")
            && left.operator !== "--" && left.operator !== "++")
            unexpected(left.start);
        var prec = op != null ? PRECEDENCE[op] : null;
        if (prec != null && (prec > min_prec || (op === "**" && min_prec === prec))) {
            next();
            var right = expr_op(maybe_unary(true), prec, no_in);
            return expr_op(new AST_Binary({
                start : left.start,
                left : left,
                operator : op,
                right : right,
                end : right.end
            }), min_prec, no_in);
        }
        return left;
    };

    function expr_ops(no_in) {
        return expr_op(maybe_unary(true, true), 0, no_in);
    };

    var maybe_conditional = function(no_in) {
        var start = S.token;
        var expr = expr_ops(no_in);
        if (is("operator", "?")) {
            next();
            var yes = expression(false);
            expect(":");
            return new AST_Conditional({
                start : start,
                condition : expr,
                consequent : yes,
                alternative : expression(false, no_in),
                end : prev()
            });
        }
        return expr;
    };

    function is_assignable(expr) {
        return expr instanceof AST_PropAccess || expr instanceof AST_SymbolRef;
    };

    function to_destructuring(node) {
        if (node instanceof AST_Object) {
            node = new AST_Destructuring({
                start: node.start,
                names: node.properties.map(to_destructuring),
                is_array: false,
                end: node.end
            });
        } else if (node instanceof AST_Array) {
            var names = [];

            for (var i = 0; i < node.elements.length; i++) {
                // Only allow expansion as last element
                if (node.elements[i] instanceof AST_Expansion) {
                    if (i + 1 !== node.elements.length) {
                        token_error(node.elements[i].start, "Spread must be the last element in a destructuring array");
                    }
                    node.elements[i].expression = to_destructuring(node.elements[i].expression);
                }

                names.push(to_destructuring(node.elements[i]));
            }

            node = new AST_Destructuring({
                start: node.start,
                names: names,
                is_array: true,
                end: node.end
            });
        } else if (node instanceof AST_ObjectProperty) {
            node.value = to_destructuring(node.value);
        } else if (node instanceof AST_Assign) {
            node = new AST_DefaultAssign({
                start: node.start,
                left: node.left,
                operator: "=",
                right: node.right,
                end: node.end
            });
        }
        return node;
    }

    // In ES6, AssignmentExpression can also be an ArrowFunction
    var maybe_assign = function(no_in) {
        var start = S.token;

        if (start.type == "name" && start.value == "yield") {
            if (is_in_generator()) {
                next();
                return _yield_expression();
            } else if (S.input.has_directive("use strict")) {
                token_error(S.token, "Unexpected yield identifier inside strict mode");
            }
        }

        var left = maybe_conditional(no_in);
        var val = S.token.value;

        if (is("operator") && ASSIGNMENT(val)) {
            if (is_assignable(left) || (left = to_destructuring(left)) instanceof AST_Destructuring) {
                next();
                return new AST_Assign({
                    start : start,
                    left : left,
                    operator : val,
                    right : maybe_assign(no_in),
                    end : prev()
                });
            }
            croak("Invalid assignment");
        }
        return left;
    };

    var expression = function(commas, no_in) {
        var start = S.token;
        var exprs = [];
        while (true) {
            exprs.push(maybe_assign(no_in));
            if (!commas || !is("punc", ",")) break;
            next();
            commas = true;
        }
        return exprs.length == 1 ? exprs[0] : new AST_Sequence({
            start : start,
            expressions : exprs,
            end : peek()
        });
    };

    function in_loop(cont) {
        ++S.in_loop;
        var ret = cont();
        --S.in_loop;
        return ret;
    };

    if (options.expression) {
        return expression(true);
    }

    return (function(){
        var start = S.token;
        var body = [];
        S.input.push_directives_stack();
        while (!is("eof"))
            body.push(statement());
        S.input.pop_directives_stack();
        var end = prev();
        var toplevel = options.toplevel;
        if (toplevel) {
            toplevel.body = toplevel.body.concat(body);
            toplevel.end = end;
        } else {
            toplevel = new AST_Toplevel({ start: start, body: body, end: end });
        }
        return toplevel;
    })();

};