Commit f9688fb
fix some problems with unnaturalcode... validate kinda works now?
Joshua Charles Campbell committed Mar 7, 2017
1 parent 601fbaf commit f9688fb
Showing 10 changed files with 331 additions and 4 deletions.
113 changes: 113 additions & 0 deletions unnaturalcode/jsSource.py
@@ -0,0 +1,113 @@
#!/usr/bin/python
# Copyright 2017 Joshua Charles Campbell, Eddie Antonio Santos
#
# This file is part of UnnaturalCode.
#
# UnnaturalCode is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# UnnaturalCode is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with UnnaturalCode. If not, see <http://www.gnu.org/licenses/>.

from __future__ import print_function

import json
import subprocess
import tempfile
from pathlib import Path
from typing import Sequence, TextIO, cast

from unnaturalcode.unnaturalCode import ucLexeme, ucSource  # used by the classes below

THIS_DIRECTORY = Path(__file__).parent
TOKENIZE_JS_BIN = (str(THIS_DIRECTORY / 'tokenize-js' / 'wrapper.sh'),)
CHECK_SYNTAX_BIN = (*TOKENIZE_JS_BIN, '--check-syntax')


def synthetic_file(text):
    """
    Creates an unnamed temporary file with the given text content.
    The returned file object always has a fileno.
    """
    file_obj = tempfile.TemporaryFile('w+t', encoding='utf-8')
    file_obj.write(text)
    file_obj.seek(0)
    return file_obj


def tokenize(text):
    """
    Tokenizes the given string.
    >>> tokens = tokenize('$("hello");')
    >>> len(tokens)
    5
    >>> isinstance(tokens[0], Token)
    True
    """
    with synthetic_file(text) as f:
        return tokenize_file(f)


def check_syntax(source):
    """
    Checks the syntax of the given JavaScript string.
    >>> check_syntax('function name() {}')
    True
    >>> check_syntax('function name() }')
    False
    """
    with synthetic_file(source) as source_file:
        return check_syntax_file(source_file)


def tokenize_file(file_obj):
    """
    Tokenizes the given JavaScript file.
    >>> with synthetic_file('$("hello");') as f:
    ...     tokens = tokenize_file(f)
    >>> len(tokens)
    5
    >>> isinstance(tokens[0], Token)
    True
    """
    status = subprocess.run(TOKENIZE_JS_BIN,
                            check=True,
                            stdin=file_obj,
                            stdout=subprocess.PIPE)
    raw = json.loads(status.stdout.decode('UTF-8'))
    print(json.dumps(raw, indent=2))  # debug dump of the raw token list
    return raw  # TODO: wrap these dicts in proper token objects


def check_syntax_file(source_file):
    """
    Check the syntax of the given JavaScript file.
    >>> with synthetic_file('$("hello");') as f:
    ...     assert check_syntax_file(f)
    >>> with synthetic_file('$("hello" + );') as f:
    ...     assert check_syntax_file(f)
    Traceback (most recent call last):
        ...
    AssertionError
    """
    status = subprocess.run(CHECK_SYNTAX_BIN, stdin=source_file)
    return status.returncode == 0


class jsLexeme(ucLexeme):
    pass


class jsSource(ucSource):

    lexemeClass = jsLexeme

    def lex(self):
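
For orientation, the new helpers are intended to be used roughly like this (a hypothetical driver, not part of the commit; note that the Token class referenced in the doctests is not defined anywhere in this diff, and tokenize_file currently returns esprima's raw token dicts):

    from unnaturalcode.jsSource import tokenize, check_syntax

    tokens = tokenize('$("hello");')            # five esprima token dicts
    assert len(tokens) == 5                     # matches the doctest above
    assert check_syntax('function f() {}')      # parses cleanly: True
    assert not check_syntax('function f() }')   # syntax error: False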
3 changes: 2 additions & 1 deletion unnaturalcode/modelValidator.py
@@ -115,6 +115,7 @@ def runFile(q,path,mode):
         os.dup2(old_stderr, sys.stderr.fileno())
         os.dup2(old_stdin, sys.stdin.fileno())
         ei = sys.exc_info();
+        info("run_path exception:", exc_info=ei)
         eip = (ei[0], str(ei[1]), traceback.extract_tb(ei[2]))
         try:
             eip[2].append(ei[1][1])
@@ -127,7 +128,7 @@ def runFile(q,path,mode):
         os.dup2(old_stderr, sys.stderr.fileno())
         os.dup2(old_stdin, sys.stdin.fileno())
         ei = sys.exc_info();
-        #info("run_path exception:", exc_info=ei)
+        info("run_path exception:", exc_info=ei)
         eip = (ei[0], str(ei[1]), traceback.extract_tb(ei[2]))
         q.put(eip)
         return
3 changes: 2 additions & 1 deletion unnaturalcode/pythonSource.py
@@ -67,7 +67,8 @@ def fromTuple(cls, tup):
             t0 = token.tok_name[tup[0]]
         else:
             t0 = tup[0]
-        return tuple.__new__(cls, (t0, str(tup[1]), ucPos(tup[2]), ucPos(tup[3]), cls.stringify(t0, str(tup[1]))))
+        new = tuple.__new__(cls, (t0, tup[1], ucPos(tup[2]), ucPos(tup[3]), cls.stringify_build(t0, tup[1])))
+        return new

     def comment(self):
         return (self.ltype == 'COMMENT')
2 changes: 1 addition & 1 deletion unnaturalcode/sourceModel.py
@@ -68,7 +68,7 @@ def predictLexed(self, lexemes):

     def windowedQuery(self, lexemes, returnWindows=True):
         lastWindowStarts = len(lexemes)-self.windowSize
-        error("Query was %i long:" % (len(lexemes),))
+        #error("Query was %i long:" % (len(lexemes),))
         if lastWindowStarts < 1:
             if returnWindows:
                 return [(lexemes, self.queryLexed(lexemes))]
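
The windowing arithmetic in this hunk is easy to misread, so here is a minimal standalone sketch of what windowedQuery appears to compute (an assumption reconstructed from the visible lines; the real method also pairs each window with self.queryLexed(window)):

    def windows(lexemes, window_size):
        last_window_starts = len(lexemes) - window_size
        if last_window_starts < 1:
            # Input shorter than one window: use it whole.
            return [lexemes]
        return [lexemes[i:i + window_size]
                for i in range(last_window_starts + 1)]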
1 change: 1 addition & 0 deletions unnaturalcode/tokenize-js/.gitignore
@@ -0,0 +1 @@
node_modules/
110 changes: 110 additions & 0 deletions unnaturalcode/tokenize-js/index.js
@@ -0,0 +1,110 @@
#!/usr/bin/env node
/*
* Copyright 2016 Eddie Antonio Santos <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

'use strict';

const fs = require('fs');
const esprima = require('esprima');

module.exports.tokenize = tokenize;
module.exports.checkSyntax = checkSyntax;


if (require.main === module) {
  const source = fs.readFileSync('/dev/stdin', 'utf8');
  const shouldCheckSyntax =
    process.argv.slice(2).indexOf('--check-syntax') >= 0;
  if (shouldCheckSyntax) {
    process.exit(checkSyntax(source) ? 0 : 1);
  } else {
    console.log(JSON.stringify(tokenize(source)));
  }
}


function tokenize(source) {
  source = removeShebangLine(source);

  /* TODO: retry on illegal tokens. */

  const sourceType = deduceSourceType(source);
  const tokens = esprima.tokenize(source, {
    sourceType,
    loc: true,
    tolerant: true
  });

  return tokens;
}

function checkSyntax(source) {
  source = removeShebangLine(source);
  const sourceType = deduceSourceType(source);

  try {
    esprima.parse(source, { sourceType });
    return true;
  } catch (e) {
    return false;
  }
}

/**
* Remove the shebang line, if there is one.
*/
function removeShebangLine(source) {
  return source.replace(/^#![^\r\n]+/, '');
}


/*
Adapted from: http://esprima.org/demo/parse.js
Copyright (C) 2013 Ariya Hidayat <[email protected]>
Copyright (C) 2012 Ariya Hidayat <[email protected]>
Copyright (C) 2011 Ariya Hidayat <[email protected]>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
function deduceSourceType(code) {
  try {
    esprima.parse(code, { sourceType: 'script' });
    return 'script';
  } catch (e) {
    return 'module';
  }
}

/* eslint no-console: 0 */
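
To see what the script emits, it can be driven directly; a minimal sketch, assuming node is on PATH, the tokenize-js dependencies are installed, and /dev/stdin resolves to the piped input (Linux/macOS):

    import json
    import subprocess

    result = subprocess.run(
        ['node', 'unnaturalcode/tokenize-js/index.js'],
        input=b'$("hello");',
        stdout=subprocess.PIPE,
        check=True,
    )
    tokens = json.loads(result.stdout.decode('utf-8'))
    # esprima yields five tokens here -- Identifier '$', Punctuator '(',
    # String '"hello"', Punctuator ')', Punctuator ';' -- each carrying
    # a loc with 1-indexed lines and 0-indexed columns.

Passing --check-syntax instead makes the process exit 0 for parseable input and 1 otherwise, which is what check_syntax_file in jsSource.py relies on.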
17 changes: 17 additions & 0 deletions unnaturalcode/tokenize-js/package.json
@@ -0,0 +1,17 @@
{
  "name": "tokenize-js",
  "version": "1.0.0",
  "description": "Tokenizes JavaScript",
  "main": "index.js",
  "scripts": {
    "test": "ava"
  },
  "author": "",
  "license": "Apache-2.0",
  "dependencies": {
    "esprima": "^3.1.1"
  },
  "devDependencies": {
    "ava": "^0.17.0"
  }
}
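
The "test" script wires npm test to ava, so the suite in test.js below runs with npm test from unnaturalcode/tokenize-js/ once the devDependencies are installed.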
68 changes: 68 additions & 0 deletions unnaturalcode/tokenize-js/test.js
@@ -0,0 +1,68 @@
/*
* Copyright 2016 Eddie Antonio Santos <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import test from 'ava';

import {tokenize, checkSyntax} from './';

test('it tokenizes a trivial script', t => {
  const tokens = tokenize('$');

  t.is(1, tokens.length);
});

test('it returns line and column numbers', t => {
  const tokens = tokenize('\n$ ');
  t.is(1, tokens.length);
  const [token] = tokens;
  t.truthy(token.loc);
  t.truthy(token.loc.start);
  /* 1-indexed. */
  t.is(2, token.loc.start.line);
  /* 0-indexed. */
  t.is(0, token.loc.start.column);
});

test('it can deal with erroneous input', t => {
  let tokens;
  t.notThrows(() => {
    tokens = tokenize(`
module.exports = function()
  console.log('Hello, world!');
};
`);
  });

  t.is(16, tokens.length);
});

test.skip('it can deal with illegal tokens', t => {
  let tokens;
  t.notThrows(() => {
    tokens = tokenize(`
module.exports = function(# {
};
`);
  });

  t.is(10, tokens.length);
});

test('it can syntax check', t => {
  t.true(checkSyntax('function fun() { }'));
  t.false(checkSyntax('function fun() };'));
});
16 changes: 16 additions & 0 deletions unnaturalcode/tokenize-js/wrapper.sh
@@ -0,0 +1,16 @@
#!/bin/sh

# Wraps the invocation of index.js such that it automatically runs
# `npm install` first whenever node_modules is missing or out of date.

original_dir=$(pwd)
script_dir=$(dirname "$0")

cd "$script_dir" || exit 1
if [ node_modules -ot package.json ]; then
    npm install > /dev/null
fi
cd "$original_dir" || exit 1

exec node "$script_dir/index.js" "$@"
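
The [ node_modules -ot package.json ] test is true when node_modules is older than package.json or missing entirely, so npm install runs on first use and again whenever the dependencies change; its output is silenced so that only index.js's JSON reaches stdout. (The -ot operator is a common shell extension rather than strict POSIX, but bash and dash both support it.)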
2 changes: 1 addition & 1 deletion unnaturalcode/unnaturalCode.py
@@ -148,7 +148,7 @@ def stringify_build(cls, t, v):
            return v
        else:
            return '<'+t+'>'

    def stringify(self):
        return self.__class__.stringify_build(self.ltype, self.val)
