From f70cc0f01c50f9f82ce13d1ac073e66c3fbad3ba Mon Sep 17 00:00:00 2001
From: Marcus
Date: Mon, 10 May 2021 23:13:17 -0400
Subject: [PATCH] breakout start

---
 Module.js    |  63 ++++++++++++++++++++++++-
 createAst.js |  71 ++++++++++++++++++++++++++++
 debug.js     |   7 +++
 run.js       | 129 ++------------------------------------------------
 4 files changed, 145 insertions(+), 125 deletions(-)
 create mode 100644 createAst.js
 create mode 100644 debug.js

diff --git a/Module.js b/Module.js
index 9faa76c..e1fbd2a 100644
--- a/Module.js
+++ b/Module.js
@@ -24,4 +24,65 @@ export default class Module {
     cold: [],
     warm: []
   }
-}
\ No newline at end of file
+
+  async link() {
+
+  }
+
+  static async create(location) {
+    const module = new Module();
+    const ast = createAst(location);
+    const name = parse(location).name;
+
+    module.name.last = name;
+    module.name.full = name;
+
+    log.ast('='.repeat(80));
+    log.ast(location);
+    log.ast(ast);
+
+    try {
+
+      // TODO: break each branch below out into a per-type Module method.
+      for (const item of ast) {
+        if ('name' in item && item.name in module.identifiers)
+          throw new Error('Identifier ' + item.name + ' already declared!');
+
+        module.identifiers[item.name] = item.type;
+
+        if (item.type in module) {
+          await module[item.type](item);
+        }
+
+        if (item.type === 'link') {
+          module.links
+            [item.required ? 'required' : 'optional']
+            [item.array ? 'arrays' : 'single']
+            .push(item.name);
+
+        } else if (item.type === 'namespace') {
+          module.name.space = item.namespace;
+          module.name.full = module.name.space + '.' + module.name.last;
+
+        } else if (item.type === 'restore') {
+          module.functions.restore = item.block;
+
+        } else if (item.type === 'function') {
+          module.functions[item.name] = item.block;
+
+        } else if (item.type === 'import') {
+          const imported = await import(item.importName);
+          if ('default' in imported) module.imports[item.name] = imported.default;
+          else module.imports[item.name] = imported;
+
+        } else if (item.type === 'variable') {
+          module.variables[item.persist ? 'cold' : 'warm'].push(item.name);
+        }
+      }
+    } catch (e) {
+      console.error(e);
+    }
+    return module;
+  }
+}
+
diff --git a/createAst.js b/createAst.js
new file mode 100644
index 0000000..8958bbe
--- /dev/null
+++ b/createAst.js
@@ -0,0 +1,71 @@
+import nearley from 'nearley';
+import compile from 'nearley/lib/compile.js';
+import generate from 'nearley/lib/generate.js';
+import nearleyGrammar from 'nearley/lib/nearley-language-bootstrapped.js';
+import moo from 'moo';
+import tokens from './tokens.js';
+import { readFileSync } from 'fs';
+import minify from './minify.js';
+const grammarFile = 'grammar.ne';
+
+function createParser() {
+  // Parse the grammar source into an AST
+  const grammarParser = new nearley.Parser(nearleyGrammar);
+  grammarParser.feed(readFileSync(grammarFile).toString());
+  const grammarAst = grammarParser.results[0]; // TODO check for errors
+
+  // Compile the AST into a set of rules
+  const grammarInfoObject = compile(grammarAst, {});
+  // Generate JavaScript code from the rules
+  const grammarJs = generate(grammarInfoObject, "grammar");
+
+  const lexer = moo.compile(tokens);
+
+  // lexer.__proto__.formatError = function(token, message) {
+  //   if (token == null) {
+  //     // An undefined token indicates EOF
+  //     var text = this.buffer.slice(this.index)
+  //     var token = {
+  //       text: text,
+  //       offset: this.index,
+  //       lineBreaks: text.indexOf('\n') === -1 ? 0 : 1,
+  //       line: this.line,
+  //       col: this.col,
+  //     }
+  //   }
+  //   var start = Math.max(0, token.offset - token.col + 1)
+  //   var eol = token.lineBreaks ? token.text.indexOf('\n') : token.text.length
+  //   var firstLine = this.buffer.substring(start, token.offset + eol)
+  //   message += " at line " + token.line + " col " + token.col + ":\n\n"
+  //   message += " " + firstLine + "\n"
+  //   message += " " + Array(token.col).join(" ") + "^"
+  //   return message
+  // }
+
+  // Pretend this is a CommonJS environment to catch exports from the grammar.
+  const module = { exports: {} };
+  eval(grammarJs);
+
+  const grammar = module.exports;
+  return new nearley.Parser(nearley.Grammar.fromCompiled(grammar))
+}
+
+export default function createAst(location) {
+  const parser = createParser();
+  const contents = readFileSync(location).toString();
+
+
+  // parser.reportError = function(token) {
+  //   return JSON.stringify(token, null, 2);
+  //   var message = this.lexer.formatError(token, 'invalid syntax') + '\n';
+  //   message += 'Unexpected ' + (token.type ? token.type + ' token: ' : '');
+  //   message +=
+  //     JSON.stringify(token.value !== undefined ? token.value : token) + '\n';
+  //   return message;
+  // };
+
+
+  parser.feed(contents);
+  parser.finish();
+  return parser.results[0];
+}
\ No newline at end of file
diff --git a/debug.js b/debug.js
new file mode 100644
index 0000000..8124015
--- /dev/null
+++ b/debug.js
@@ -0,0 +1,7 @@
+
+import d from 'debug';
+
+export const ast = d('vogue:ast');
+export const modules = d('vogue:modules');
+export const system = d('vogue:system');
+export const debug = d('vogue:debug');
\ No newline at end of file
diff --git a/run.js b/run.js
index 18a496a..81f43c6 100755
--- a/run.js
+++ b/run.js
@@ -1,76 +1,24 @@
 #!/usr/bin/env node
 
-import debug from 'debug';
 import { resolve, parse } from 'path';
-import { readFileSync, readdirSync } from 'fs';
-import nearley from 'nearley';
-import compile from 'nearley/lib/compile.js';
-import generate from 'nearley/lib/generate.js';
-import nearleyGrammar from 'nearley/lib/nearley-language-bootstrapped.js';
-import moo from 'moo';
-const grammarFile = 'grammar.ne';
+import { readdirSync } from 'fs';
+
 import _ from 'lodash';
 const { get, set } = _;
 
 import Module from './Module.js';
 import System from './System.js';
-import tokens from './tokens.js';
+import * as log from './debug.js';
+import createAst from './createAst.js';
 
 // globals inside grammar context
 import minify from './minify.js';
 
-const log = {
-  ast: debug('vogue:ast'),
-  modules: debug('vogue:modules'),
-  debug: debug('vogue:debug'),
-}
-
 Object.defineProperty(Array.prototype, 'empty', {
   get() { return this.length === 0; }
 });
 
-function createParser() {
-  // Parse the grammar source into an AST
-  const grammarParser = new nearley.Parser(nearleyGrammar);
-  grammarParser.feed(readFileSync(grammarFile).toString());
-  const grammarAst = grammarParser.results[0]; // TODO check for errors
-
-  // Compile the AST into a set of rules
-  const grammarInfoObject = compile(grammarAst, {});
-  // Generate JavaScript code from the rules
-  const grammarJs = generate(grammarInfoObject, "grammar");
-
-  const lexer = moo.compile(tokens);
-
-  // lexer.__proto__.formatError = function(token, message) {
-  //   if (token == null) {
-  //     // An undefined token indicates EOF
-  //     var text = this.buffer.slice(this.index)
-  //     var token = {
-  //       text: text,
-  //       offset: this.index,
-  //       lineBreaks: text.indexOf('\n') === -1 ? 0 : 1,
-  //       line: this.line,
-  //       col: this.col,
-  //     }
-  //   }
-  //   var start = Math.max(0, token.offset - token.col + 1)
-  //   var eol = token.lineBreaks ? token.text.indexOf('\n') : token.text.length
-  //   var firstLine = this.buffer.substring(start, token.offset + eol)
-  //   message += " at line " + token.line + " col " + token.col + ":\n\n"
-  //   message += " " + firstLine + "\n"
-  //   message += " " + Array(token.col).join(" ") + "^"
-  //   return message
-  // }
-
-  // Pretend this is a CommonJS environment to catch exports from the grammar.
-  const module = { exports: {} };
-  eval(grammarJs);
-
-  const grammar = module.exports;
-  return new nearley.Parser(nearley.Grammar.fromCompiled(grammar))
-}
 
 const systemLocation = resolve(process.argv[2]);
 const entry = process.argv[3];
 
@@ -81,7 +29,7 @@ const entry = process.argv[3];
     (await Promise.all(
       readdirSync(systemLocation)
        .map(v => resolve(systemLocation, v))
-       .map(parseModule)
+       .map(loc => Module.create(loc))
    )).reduce((acc, val) => {
      set(acc, val.name.full, val);
      return acc;
@@ -89,70 +37,3 @@ const entry = process.argv[3];
 
   const sys = new System(modules);
 })()
-
-async function parseModule(location) {
-  const parser = createParser();
-  const contents = readFileSync(location).toString();
-  const name = parse(location).name;
-  const module = new Module();
-
-  // parser.reportError = function(token) {
-  //   return JSON.stringify(token, null, 2);
-  //   var message = this.lexer.formatError(token, 'invalid syntax') + '\n';
-  //   message += 'Unexpected ' + (token.type ? token.type + ' token: ' : '');
-  //   message +=
-  //     JSON.stringify(token.value !== undefined ? token.value : token) + '\n';
-  //   return message;
-  // };
-
-  parser.feed(contents);
-  parser.finish();
-  const parsed = parser.results[0];
-
-  log.ast('='.repeat(80));
-  log.ast(location);
-  log.ast(parsed);
-
-  module.name.last = name;
-  module.name.full = name;
-  try {
-
-    // move this whole loop ass bitch into module.
-    for (const item of parsed) {
-      if ('name' in item && item.name in module.identifiers)
-        throw new Error('Identifier ' + item.name + ' already declared!');
-
-      module.identifiers[item.name] = item.type;
-
-      if (item.type === 'link') {
-        module.links
-          [item.required ? 'required' : 'optional']
-          [item.array ? 'arrays' : 'single']
-          .push(item.name);
-
-      } else if (item.type === 'namespace') {
-        module.name.space = item.namespace;
-        module.name.full = module.name.space + '.' + module.name.last;
-
-      } else if (item.type === 'restore') {
-        module.functions.restore = item.block;
-
-      } else if (item.type === 'function') {
-        module.functions[item.name] = item.block;
-
-      } else if (item.type === 'import') {
-        const imported = await import(item.importName);
-        if('default' in imported) module.imports[item.name] = imported.default;
-        else module.imports[item.name] = imported;
-
-      } else if (item.type === 'variable') {
-        module.variables[item.persist ? 'cold' : 'warm'].push(item.name);
-      }
-    }
-  } catch (e) {
-    console.error(e);
-  }
-
-  return module;
-
-}