Mirror of https://github.com/wren-lang/wren.git
Compare commits — 7 commits:

- ce8d89d18c
- fbdf504c76
- 3dfba7c67a
- 24be23a37d
- 5aa0a5ee41
- b08cadefc0
- 46a33a412d
@@ -131,6 +131,19 @@ class WhereSequence is Sequence {
class String is Sequence {
  bytes { StringByteSequence.new(this) }
  codePoints { StringCodePointSequence.new(this) }

  static interpolate(interpolation) {
    var result = ""
    for (part in interpolation.parts) {
      if (part is String) {
        result = result + part
      } else {
        result = result + part.call().toString
      }
    }

    return result
  }
}

class StringByteSequence is Sequence {

@@ -157,6 +170,31 @@ class StringCodePointSequence is Sequence {
  count { _string.count }
}

class StringInterpolation {
  parts { _parts }

  construct new_(list) {
    _parts = [list[0]]

    var i = 1
    while (i < list.count) {
      _parts.add(InterpolatedField.new_(list[i], list[i + 1]))
      _parts.add(list[i + 2])
      i = i + 3
    }
  }
}

class InterpolatedField {
  construct new_(fn, source) {
    _fn = fn
    _source = source
  }

  call() { _fn.call() }
  source { _source }
}

class List is Sequence {
  addAll(other) {
    for (element in other) {
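The pieces above fit together as follows: the compiler (see the stringInterpolation() function further down) builds a flat list of [string, expression-function, source-text, ..., trailing string], hands it to StringInterpolation.new_, and String.interpolate then walks the grouped parts. A rough Wren sketch of what a literal like "sum: %(1 + 2)!" evaluates as — illustrative only, not literal compiler output:

String.interpolate(StringInterpolation.new_([
  "sum: ",           // opening string part
  Fn.new { 1 + 2 },  // the interpolated expression, compiled to a function
  "1 + 2",           // the source text attached to the closing ")"
  "!"                // trailing string part
]))                  // returns "sum: 3!"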
doc/site outline.txt (new file, 62 lines)
@@ -0,0 +1,62 @@
-- new

- introduction
- getting started
- tutorial
- community
- contributing
- guide
- basic syntax
  scripts, comments, newlines, names
- values
  null, bools, numbers, strings, and ranges
- collections
  lists and maps
- expressions
  calls, this, super, operators, is
- control flow
  if, while, for, sequence protocol, && ||, ?:
- variables
  var, assignment, scope, blocks
- functions
  block arguments, closures
- classes
- concurrency
- error handling
- modules
- embedding
- core module
  ...
- cli modules
- io
- scheduler
- timer
  ...
- reference
- performance
- faq

-- current

- getting started
- language
- syntax
- expressions
- variables
- control flow
- error handling
- modules
- types
- values
- classes
- fibers
- functions
- lists
- maps
- reference
- core
- embedding api
- performance
- community
- contributing
- qa
@@ -65,7 +65,7 @@ class SyntaxExample {
  -(other) { "infix - " + other }
  *(other) { "infix * " + other }
  /(other) { "infix / " + other }
  %(other) { "infix % " + other }
  %(other) { "infix \% " + other }
  <(other) { "infix < " + other }
  >(other) { "infix > " + other }
  <=(other) { "infix <= " + other }
@@ -24,17 +24,23 @@
// Note that this limitation is also explicit in the bytecode. Since
// `CODE_LOAD_LOCAL` and `CODE_STORE_LOCAL` use a single argument byte to
// identify the local, only 256 can be in scope at one time.
#define MAX_LOCALS (256)
#define MAX_LOCALS 256

// The maximum number of upvalues (i.e. variables from enclosing functions)
// that a function can close over.
#define MAX_UPVALUES (256)
#define MAX_UPVALUES 256

// The maximum number of distinct constants that a function can contain. This
// value is explicit in the bytecode since `CODE_CONSTANT` only takes a single
// two-byte argument.
#define MAX_CONSTANTS (1 << 16)

// The maximum depth that interpolation can nest. For example, this string has
// three levels:
//
//      "outside %(one + "%(two + "%(three)")")"
#define MAX_INTERPOLATION_NESTING 8

typedef enum
{
  TOKEN_LEFT_PAREN,
@@ -95,7 +101,24 @@ typedef enum
  TOKEN_STATIC_FIELD,
  TOKEN_NAME,
  TOKEN_NUMBER,

  // A string literal without any interpolation, or the last section of a
  // string following the last interpolated expression.
  TOKEN_STRING,

  // A portion of a string literal preceding an interpolated expression. This
  // string:
  //
  //     "a %(b) c %(d) e"
  //
  // is tokenized to:
  //
  //     TOKEN_INTERPOLATION "a "
  //     TOKEN_NAME b
  //     TOKEN_INTERPOLATION " c "
  //     TOKEN_NAME d
  //     TOKEN_STRING " e"
  TOKEN_INTERPOLATION,

  TOKEN_LINE,
@@ -115,8 +138,33 @@ typedef struct

  // The 1-based line where the token appears.
  int line;

  // The parsed value if the token is a literal.
  Value value;
} Token;

// Tracks the lexing state when tokenizing interpolated strings.
//
// Interpolated strings make the lexer not strictly regular: we don't know
// whether a ")" should be treated as a RIGHT_PAREN token or as ending an
// interpolated expression unless we know whether we are inside a string
// interpolation and how many unmatched "(" there are. This is particularly
// complex because interpolation can nest:
//
//     " %( " %( inner ) " ) "
//
// This tracks that state. The parser maintains a stack of these, one for each
// level of current interpolation nesting.
typedef struct
{
  // The number of opening "(" that have not been matched by a closing ")" yet.
  // This will be -1 after the final ")" has been emitted.
  int parens;

  // The beginning of the interpolated expression in the source.
  const char* start;
} Interpolation;

typedef struct
{
  WrenVM* vm;

@@ -145,7 +193,11 @@ typedef struct

  // The most recently consumed/advanced token.
  Token previous;

  // The stack of current interpolation expressions.
  Interpolation interpolation[MAX_INTERPOLATION_NESTING];
  int interpolationDepth;

  // If subsequent newline tokens should be discarded.
  bool skipNewlines;

@@ -154,9 +206,6 @@ typedef struct

  // If a syntax or compile error has occurred.
  bool hasError;

  // The parsed value if the current token is a literal.
  Value value;
} Parser;

typedef struct
@@ -524,7 +573,7 @@ static void makeToken(Parser* parser, TokenType type)
  parser->current.start = parser->tokenStart;
  parser->current.length = (int)(parser->currentChar - parser->tokenStart);
  parser->current.line = parser->currentLine;

  // Make line tokens appear on the line containing the "\n".
  if (type == TOKEN_LINE) parser->current.line--;
}

@@ -600,13 +649,13 @@ static void makeNumber(Parser* parser, bool isHex)

  // We don't check that the entire token is consumed because we've already
  // scanned it ourselves and know it's valid.
  parser->value = NUM_VAL(isHex ? strtol(parser->tokenStart, NULL, 16)
                                : strtod(parser->tokenStart, NULL));
  parser->current.value = NUM_VAL(isHex ? strtol(parser->tokenStart, NULL, 16)
                                        : strtod(parser->tokenStart, NULL));

  if (errno == ERANGE)
  {
    lexError(parser, "Number literal was too large.");
    parser->value = NUM_VAL(0);
    parser->current.value = NUM_VAL(0);
  }

  makeToken(parser, TOKEN_NUMBER);
@@ -724,8 +773,9 @@ static void readUnicodeEscape(Parser* parser, ByteBuffer* string)
static void readString(Parser* parser)
{
  ByteBuffer string;
  TokenType type = TOKEN_STRING;
  wrenByteBufferInit(&string);

  for (;;)
  {
    char c = nextChar(parser);

@@ -741,12 +791,33 @@ static void readString(Parser* parser)
      break;
    }

    if (c == '%')
    {
      if (parser->interpolationDepth < MAX_INTERPOLATION_NESTING)
      {
        // TODO: Allow format string.
        if (peekChar(parser) != '(') lexError(parser, "Expect '(' after '%'.");

        parser->interpolation[parser->interpolationDepth].parens = 0;
        parser->interpolation[parser->interpolationDepth].start =
            parser->currentChar + 1;

        parser->interpolationDepth++;
        type = TOKEN_INTERPOLATION;
        break;
      }

      lexError(parser, "Interpolation may only nest %d levels deep.",
               MAX_INTERPOLATION_NESTING);
    }

    if (c == '\\')
    {
      switch (nextChar(parser))
      {
        case '"': wrenByteBufferWrite(parser->vm, &string, '"'); break;
        case '\\': wrenByteBufferWrite(parser->vm, &string, '\\'); break;
        case '%': wrenByteBufferWrite(parser->vm, &string, '%'); break;
        case '0': wrenByteBufferWrite(parser->vm, &string, '\0'); break;
        case 'a': wrenByteBufferWrite(parser->vm, &string, '\a'); break;
        case 'b': wrenByteBufferWrite(parser->vm, &string, '\b'); break;

@@ -774,9 +845,19 @@ static void readString(Parser* parser)
    }
  }

  parser->value = wrenNewString(parser->vm, (char*)string.data, string.count);
  parser->current.value = wrenNewString(parser->vm,
                                        (char*)string.data, string.count);

  wrenByteBufferClear(parser->vm, &string);
  makeToken(parser, TOKEN_STRING);
  makeToken(parser, type);
}

// Returns a pointer to the current innermost interpolation expression or `NULL`
// if we aren't in the middle of an interpolation.
Interpolation* currentInterpolation(Parser* parser)
{
  if (parser->interpolationDepth == 0) return NULL;
  return &parser->interpolation[parser->interpolationDepth - 1];
}

// Lex the next token and store it in [parser.current].

@@ -789,6 +870,16 @@ static void nextToken(Parser* parser)
  // will still work.
  if (parser->current.type == TOKEN_EOF) return;

  // If we just ended an interpolation expression, read the next piece of the
  // string literal.
  Interpolation* interpolation = currentInterpolation(parser);
  if (interpolation != NULL && interpolation->parens == -1)
  {
    parser->interpolationDepth--;
    readString(parser);
    return;
  }

  while (peekChar(parser) != '\0')
  {
    parser->tokenStart = parser->currentChar;

@@ -796,8 +887,32 @@ static void nextToken(Parser* parser)
    char c = nextChar(parser);
    switch (c)
    {
      case '(': makeToken(parser, TOKEN_LEFT_PAREN); return;
      case ')': makeToken(parser, TOKEN_RIGHT_PAREN); return;
      case '(':
        // If we are inside an interpolated expression, count the unmatched "(".
        if (interpolation != NULL) interpolation->parens++;
        makeToken(parser, TOKEN_LEFT_PAREN);
        return;
      case ')':
        // If we are inside an interpolated expression, count the ")".
        if (interpolation != NULL && --interpolation->parens == 0)
        {
          // This is the final ")", so the interpolated expression has ended.
          // This ")" now begins the next section of the string interpolation.
          // Attach the source of the interpolated expression to the ")" as a
          // string so it's available to a custom interpolator.
          parser->current.value = wrenNewString(parser->vm,
              interpolation->start,
              (int)(parser->currentChar - interpolation->start) - 1);

          // Mark this interpolation as done so that we'll start the string
          // section on the next token.
          interpolation->parens = -1;
        }

        makeToken(parser, TOKEN_RIGHT_PAREN);
        return;

      case '[': makeToken(parser, TOKEN_LEFT_BRACKET); return;
      case ']': makeToken(parser, TOKEN_RIGHT_BRACKET); return;
      case '{': makeToken(parser, TOKEN_LEFT_BRACE); return;
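As an illustrative sketch (not part of the diff), with this bookkeeping a literal such as "a %((1 + 2) * 3) b" lexes roughly as:

TOKEN_INTERPOLATION "a "    // readString stops at '%', pushes an Interpolation with parens = 0
TOKEN_LEFT_PAREN            // the '(' after '%' bumps parens to 1
...tokens for (1 + 2) * 3   // the inner '(' and ')' move parens to 2 and back to 1
TOKEN_RIGHT_PAREN           // parens reaches 0, the source "(1 + 2) * 3" is attached, parens set to -1
TOKEN_STRING " b"           // the next nextToken() call sees parens == -1, pops the stack, resumes readString()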
@@ -1840,15 +1955,9 @@ static void list(Compiler* compiler, bool allowAssignment)
    // Stop if we hit the end of the list.
    if (peek(compiler) == TOKEN_RIGHT_BRACKET) break;

    // Push a copy of the list since the add() call will consume it.
    emit(compiler, CODE_DUP);

    // The element.
    expression(compiler);
    callMethod(compiler, 1, "add(_)", 6);

    // Discard the result of the add() call.
    emit(compiler, CODE_POP);
    callMethod(compiler, 1, "addCore_(_)", 11);
  } while (match(compiler, TOKEN_COMMA));

  // Allow newlines before the closing ']'.

@@ -1872,9 +1981,6 @@ static void map(Compiler* compiler, bool allowAssignment)
    // Stop if we hit the end of the map.
    if (peek(compiler) == TOKEN_RIGHT_BRACE) break;

    // Push a copy of the map since the subscript call will consume it.
    emit(compiler, CODE_DUP);

    // The key.
    parsePrecedence(compiler, false, PREC_PRIMARY);
    consume(compiler, TOKEN_COLON, "Expect ':' after map key.");

@@ -1882,10 +1988,7 @@ static void map(Compiler* compiler, bool allowAssignment)

    // The value.
    expression(compiler);
    callMethod(compiler, 2, "[_]=(_)", 7);

    // Discard the result of the setter call.
    emit(compiler, CODE_POP);
    callMethod(compiler, 2, "addCore_(_,_)", 13);
  } while (match(compiler, TOKEN_COMMA));

  // Allow newlines before the closing '}'.
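Because addCore_(_) and addCore_(_,_) return their receiver (see the list_addCore and map_addCore primitives further down), the DUP/POP pair per element is no longer needed. A rough Wren-level picture of what literal compilation now behaves like — an illustration, not actual compiler output:

// [1, 2, 3] behaves roughly like:
List.new().addCore_(1).addCore_(2).addCore_(3)

// {"a": 1, "b": 2} behaves roughly like:
Map.new().addCore_("a", 1).addCore_("b", 2)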
@@ -2143,7 +2246,61 @@ static void null(Compiler* compiler, bool allowAssignment)

// A number or string literal.
static void literal(Compiler* compiler, bool allowAssignment)
{
  emitConstant(compiler, compiler->parser->value);
  emitConstant(compiler, compiler->parser->previous.value);
}

static void stringInterpolation(Compiler* compiler, bool allowAssignment)
{
  // TODO: Allow other expressions here so that user-defined classes can control
  // interpolation processing like "tagged template strings" in ES6.
  loadCoreVariable(compiler, "String");

  loadCoreVariable(compiler, "StringInterpolation");

  // Instantiate a new list.
  loadCoreVariable(compiler, "List");
  callMethod(compiler, 0, "new()", 5);

  do
  {
    // The opening string part.
    literal(compiler, false);
    callMethod(compiler, 1, "addCore_(_)", 11);

    consume(compiler, TOKEN_LEFT_PAREN, "Expect '(' after '%'.");

    ignoreNewlines(compiler);

    // Compile the interpolated expression part to a function.
    Compiler fnCompiler;
    initCompiler(&fnCompiler, compiler->parser, compiler, true);
    expression(&fnCompiler);
    emit(&fnCompiler, CODE_RETURN);
    endCompiler(&fnCompiler, "interpolation", 9);

    callMethod(compiler, 1, "addCore_(_)", 11);

    ignoreNewlines(compiler);

    // The value attached to the ")" is the source text of the interpolated
    // expression. Include that as a string literal so custom interpolators
    // can see the uninterpolated code.
    consume(compiler, TOKEN_RIGHT_PAREN, "Expect ')' after interpolation.");
    literal(compiler, false);
    callMethod(compiler, 1, "addCore_(_)", 11);

  } while (match(compiler, TOKEN_INTERPOLATION));

  // The trailing string part.
  consume(compiler, TOKEN_STRING, "Expect end of string interpolation.");
  literal(compiler, false);
  callMethod(compiler, 1, "addCore_(_)", 11);

  // Call StringInterpolation.new_() with the list.
  callMethod(compiler, 1, "new_(_)", 7);

  // Call .interpolate() with the interpolation object.
  callMethod(compiler, 1, "interpolate(_)", 14);
}

static void super_(Compiler* compiler, bool allowAssignment)
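The TODO above gestures at tagged-template-style processing. Since StringInterpolation exposes parts, and each InterpolatedField exposes call() and source, a user-level consumer is already expressible. A hypothetical Wren sketch (variable names and usage are illustrative, not from the diff):

// Hypothetical: walk the parts the way String.interpolate does, but also show
// each interpolated expression's source text.
var interp = StringInterpolation.new_(["x = ", Fn.new { 3 * 4 }, "3 * 4", ""])
var out = ""
for (part in interp.parts) {
  if (part is String) {
    out = out + part
  } else {
    out = out + "%(part.source) => %(part.call())"
  }
}
System.print(out) // x = 3 * 4 => 12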
@@ -2487,6 +2644,7 @@ GrammarRule rules[] =
  /* TOKEN_NAME */ { name, NULL, namedSignature, PREC_NONE, NULL },
  /* TOKEN_NUMBER */ PREFIX(literal),
  /* TOKEN_STRING */ PREFIX(literal),
  /* TOKEN_INTERPOLATION */ PREFIX(stringInterpolation),
  /* TOKEN_LINE */ UNUSED,
  /* TOKEN_ERROR */ UNUSED,
  /* TOKEN_EOF */ UNUSED

@@ -3135,7 +3293,7 @@ static void classDefinition(Compiler* compiler, bool isForeign)
static void import(Compiler* compiler)
{
  consume(compiler, TOKEN_STRING, "Expect a string after 'import'.");
  int moduleConstant = addConstant(compiler, compiler->parser->value);
  int moduleConstant = addConstant(compiler, compiler->parser->previous.value);

  // Load the module.
  emitShortArg(compiler, CODE_LOAD_MODULE, moduleConstant);

@@ -3229,11 +3387,11 @@ ObjFn* wrenCompile(WrenVM* vm, ObjModule* module, const char* sourcePath,
  parser.module = module;
  parser.sourcePath = AS_STRING(sourcePathValue);
  parser.source = source;
  parser.value = UNDEFINED_VAL;

  parser.tokenStart = source;
  parser.currentChar = source;
  parser.currentLine = 1;
  parser.interpolationDepth = 0;

  // Zero-init the current token. This will get copied to previous when
  // advance() is called below.

@@ -3241,6 +3399,7 @@ ObjFn* wrenCompile(WrenVM* vm, ObjModule* module, const char* sourcePath,
  parser.current.start = source;
  parser.current.length = 0;
  parser.current.line = 0;
  parser.current.value = UNDEFINED_VAL;

  // Ignore leading newlines.
  parser.skipNewlines = true;

@@ -3282,7 +3441,7 @@ ObjFn* wrenCompile(WrenVM* vm, ObjModule* module, const char* sourcePath,
                  parser.module->variableNames.data[i].buffer);
    }
  }

  return endCompiler(&compiler, "(script)", 8);
}

@@ -3355,7 +3514,8 @@ void wrenBindMethodCode(ObjClass* classObj, ObjFn* fn)
void wrenMarkCompiler(WrenVM* vm, Compiler* compiler)
{
  wrenMarkObj(vm, (Obj*)compiler->parser->sourcePath);
  wrenMarkValue(vm, compiler->parser->value);
  wrenMarkValue(vm, compiler->parser->previous.value);
  wrenMarkValue(vm, compiler->parser->current.value);

  // Walk up the parent chain to mark the outer compilers too. The VM only
  // tracks the innermost one.
@@ -264,6 +264,17 @@ DEF_PRIMITIVE(list_add)
  RETURN_VAL(args[1]);
}

// Adds an element to the list and then returns the list itself. This is called
// by the compiler when compiling list literals instead of using add() to
// minimize stack churn.
DEF_PRIMITIVE(list_addCore)
{
  wrenValueBufferWrite(vm, &AS_LIST(args[0])->elements, args[1]);

  // Return the list.
  RETURN_VAL(args[0]);
}

DEF_PRIMITIVE(list_clear)
{
  wrenValueBufferClear(vm, &AS_LIST(args[0])->elements);

@@ -394,6 +405,19 @@ DEF_PRIMITIVE(map_subscriptSetter)
  RETURN_VAL(args[2]);
}

// Adds an entry to the map and then returns the map itself. This is called by
// the compiler when compiling map literals instead of using [_]=(_) to
// minimize stack churn.
DEF_PRIMITIVE(map_addCore)
{
  if (!validateKey(vm, args[1])) return false;

  wrenMapSet(vm, AS_MAP(args[0]), args[1], args[2]);

  // Return the map itself.
  RETURN_VAL(args[0]);
}

DEF_PRIMITIVE(map_clear)
{
  wrenMapClear(vm, AS_MAP(args[0]));

@@ -1218,6 +1242,7 @@ void wrenInitializeCore(WrenVM* vm)
  PRIMITIVE(vm->listClass, "[_]", list_subscript);
  PRIMITIVE(vm->listClass, "[_]=(_)", list_subscriptSetter);
  PRIMITIVE(vm->listClass, "add(_)", list_add);
  PRIMITIVE(vm->listClass, "addCore_(_)", list_addCore);
  PRIMITIVE(vm->listClass, "clear()", list_clear);
  PRIMITIVE(vm->listClass, "count", list_count);
  PRIMITIVE(vm->listClass, "insert(_,_)", list_insert);

@@ -1229,6 +1254,7 @@ void wrenInitializeCore(WrenVM* vm)
  PRIMITIVE(vm->mapClass->obj.classObj, "new()", map_new);
  PRIMITIVE(vm->mapClass, "[_]", map_subscript);
  PRIMITIVE(vm->mapClass, "[_]=(_)", map_subscriptSetter);
  PRIMITIVE(vm->mapClass, "addCore_(_,_)", map_addCore);
  PRIMITIVE(vm->mapClass, "clear()", map_clear);
  PRIMITIVE(vm->mapClass, "containsKey(_)", map_containsKey);
  PRIMITIVE(vm->mapClass, "count", map_count);
@@ -133,6 +133,19 @@ static const char* coreModuleSource =
"class String is Sequence {\n"
"  bytes { StringByteSequence.new(this) }\n"
"  codePoints { StringCodePointSequence.new(this) }\n"
"\n"
"  static interpolate(interpolation) {\n"
"    var result = \"\"\n"
"    for (part in interpolation.parts) {\n"
"      if (part is String) {\n"
"        result = result + part\n"
"      } else {\n"
"        result = result + part.call().toString\n"
"      }\n"
"    }\n"
"\n"
"    return result\n"
"  }\n"
"}\n"
"\n"
"class StringByteSequence is Sequence {\n"

@@ -159,6 +172,31 @@ static const char* coreModuleSource =
"  count { _string.count }\n"
"}\n"
"\n"
"class StringInterpolation {\n"
"  parts { _parts }\n"
"\n"
"  construct new_(list) {\n"
"    _parts = [list[0]]\n"
"\n"
"    var i = 1\n"
"    while (i < list.count) {\n"
"      _parts.add(InterpolatedField.new_(list[i], list[i + 1]))\n"
"      _parts.add(list[i + 2])\n"
"      i = i + 3\n"
"    }\n"
"  }\n"
"}\n"
"\n"
"class InterpolatedField {\n"
"  construct new_(fn, source) {\n"
"    _fn = fn\n"
"    _source = source\n"
"  }\n"
"\n"
"  call() { _fn.call() }\n"
"  source { _source }\n"
"}\n"
"\n"
"class List is Sequence {\n"
"  addAll(other) {\n"
"    for (element in other) {\n"

@@ -218,18 +256,18 @@ static const char* coreModuleSource =
"\n"
"class System {\n"
"  static print() {\n"
"    writeString_(\"\n\")\n"
"    writeString_(\"\\n\")\n"
"  }\n"
"\n"
"  static print(obj) {\n"
"    writeObject_(obj)\n"
"    writeString_(\"\n\")\n"
"    writeString_(\"\\n\")\n"
"    return obj\n"
"  }\n"
"\n"
"  static printAll(sequence) {\n"
"    for (object in sequence) writeObject_(object)\n"
"    writeString_(\"\n\")\n"
"    writeString_(\"\\n\")\n"
"  }\n"
"\n"
"  static write(obj) {\n"
@@ -31,7 +31,7 @@ foreign class Point is PointBase {
  }

  construct new(x, y, z) {
    System.print(x.toString + ", " + y.toString + ", " + z.toString)
    System.print("%(x), %(y), %(z)")
  }

  foreign translate(x, y, z)
@@ -26,8 +26,8 @@ var stretchDepth = maxDepth + 1

var start = System.clock

System.print("stretch tree of depth " + stretchDepth.toString + " check: " +
    Tree.new(0, stretchDepth).check.toString)
System.print("stretch tree of depth %(stretchDepth) check: " +
    "%(Tree.new(0, stretchDepth).check)")

var longLivedTree = Tree.new(0, maxDepth)

@@ -44,12 +44,11 @@ while (depth < stretchDepth) {
    check = check + Tree.new(i, depth).check + Tree.new(-i, depth).check
  }

  System.print((iterations * 2).toString + " trees of depth " +
      depth.toString + " check: " + check.toString)
  System.print("%(iterations * 2) trees of depth %(depth) check: %(check)")
  iterations = iterations / 4
  depth = depth + 2
}

System.print("long lived tree of depth " + maxDepth.toString + " check: " +
    longLivedTree.check.toString)
System.print(
    "long lived tree of depth %(maxDepth) check: %(longLivedTree.check)")
System.print("elapsed: " + (System.clock - start).toString)
@@ -696,4 +696,4 @@ for (i in 0...40) {
}

System.print(total)
System.print("elapsed: " + (System.clock - start).toString)
System.print("elapsed: %(System.clock - start)")

@@ -9,4 +9,4 @@ var start = System.clock
for (i in 1..5) {
  System.print(Fib.get(28))
}
System.print("elapsed: " + (System.clock - start).toString)
System.print("elapsed: %(System.clock - start)")

@@ -13,4 +13,4 @@ for (i in 0...100000) {

fibers[0].call()
System.print(sum)
System.print("elapsed: " + (System.clock - start).toString)
System.print("elapsed: %(System.clock - start)")

@@ -7,4 +7,4 @@ var sum = 0
for (i in list) sum = sum + i

System.print(sum)
System.print("elapsed: " + (System.clock - start).toString)
System.print("elapsed: %(System.clock - start)")

@@ -16,4 +16,4 @@ for (i in 1..1000000) {
  map.remove(i)
}

System.print("elapsed: " + (System.clock - start).toString)
System.print("elapsed: %(System.clock - start)")

@@ -97,4 +97,4 @@ for (key in keys) {
}

System.print(sum)
System.print("elapsed: " + (System.clock - start).toString)
System.print("elapsed: %(System.clock - start)")

@@ -65,4 +65,4 @@ for (i in 0...n) {
}

System.print(ntoggle.value)
System.print("elapsed: " + (System.clock - start).toString)
System.print("elapsed: %(System.clock - start)")
@@ -18,7 +18,7 @@ for i in range(0, 1000000):
        count = count + 1
    if "abc" == "abcd":
        count = count + 1
    if "changed one character" == "changed %ne character":
    if "changed one character" == "changed !ne character":
        count = count + 1
    if "123" == 123: count = count + 1
    if "a slightly longer string" == \

@@ -10,7 +10,7 @@ for (i in 1..1000000) {

  if ("" == "abc") count = count + 1
  if ("abc" == "abcd") count = count + 1
  if ("changed one character" == "changed %ne character") count = count + 1
  if ("changed one character" == "changed !ne character") count = count + 1
  if ("123" == 123) count = count + 1
  if ("a slightly longer string" ==
      "a slightly longer string!") count = count + 1

@@ -21,4 +21,4 @@ for (i in 1..1000000) {
}

System.print(count)
System.print("elapsed: " + (System.clock - start).toString)
System.print("elapsed: %(System.clock - start)")
@@ -1,7 +1,7 @@
var f0 = Fn.new { System.print("zero") }
var f1 = Fn.new {|a| System.print("one " + a) }
var f2 = Fn.new {|a, b| System.print("two " + a + " " + b) }
var f3 = Fn.new {|a, b, c| System.print("three " + a + " " + b + " " + c) }
var f1 = Fn.new {|a| System.print("one %(a)") }
var f2 = Fn.new {|a, b| System.print("two %(a) %(b)") }
var f3 = Fn.new {|a, b, c| System.print("three %(a) %(b) %(c)") }

f0.call("a") // expect: zero
f0.call("a", "b") // expect: zero

@@ -19,10 +19,10 @@ System.print({1: Foo.new()}) // expect: {1: Foo.toString}
// will be.
var s = {1: 2, 3: 4, 5: 6}.toString
System.print(s == "{1: 2, 3: 4, 5: 6}" ||
             s == "{1: 2, 5: 6, 3: 4}" ||
             s == "{3: 4, 1: 2, 5: 6}" ||
             s == "{3: 4, 5: 6, 1: 2}" ||
             s == "{5: 6, 1: 2, 3: 4}" ||
             s == "{5: 6, 3: 4, 1: 2}") // expect: true
             s == "{1: 2, 5: 6, 3: 4}" ||
             s == "{3: 4, 1: 2, 5: 6}" ||
             s == "{3: 4, 5: 6, 1: 2}" ||
             s == "{5: 6, 1: 2, 3: 4}" ||
             s == "{5: 6, 3: 4, 1: 2}") // expect: true

// TODO: Handle maps that contain themselves.
// TODO: Handle maps that contain themselves.
@@ -1,9 +1,9 @@
for (i in 0..2) {
  System.print("outer " + i.toString)
  System.print("outer %(i)")
  if (i > 1) break

  for (j in 0..2) {
    System.print("inner " + j.toString)
    System.print("inner %(j)")
    if (j > 1) break
  }
}

@@ -1,11 +1,11 @@
var i = 0
while (true) {
  System.print("outer " + i.toString)
  System.print("outer %(i)")
  if (i > 1) break

  var j = 0
  while (true) {
    System.print("inner " + j.toString)
    System.print("inner %(j)")
    if (j > 1) break

    j = j + 1

@@ -1,6 +1,6 @@
class A {
  construct new(arg) {
    System.print("new A " + arg)
    System.print("new A %(arg)")
    _field = arg
  }

@@ -10,7 +10,7 @@ class A {
class B is A {
  construct new(arg1, arg2) {
    super(arg2)
    System.print("new B " + arg1)
    System.print("new B %(arg1)")
    _field = arg1
  }
test/language/interpolation/empty.wren (new file, 1 line)
@@ -0,0 +1 @@
" %() " // expect error

test/language/interpolation/interpolation.wren (new file, 15 lines)
@@ -0,0 +1,15 @@
// Full string.
System.print("%(1 + 2)") // expect: 3

// Multiple in one string.
System.print("str%(1 + 2)(%(3 + 4)\%%(5 + 6)") // expect: str3(7%11

// Nested.
System.print("[%("{%("in" + "ner")}")]") // expect: [{inner}]

// Ignore newlines in template.
System.print("[%(

"template"

)]") // expect: [template]

@@ -0,0 +1 @@
System.print("%(123.badMethod)") // expect runtime error: Num does not implement 'badMethod'.

test/language/interpolation/switch_fiber.wren (new file, 8 lines)
@@ -0,0 +1,8 @@
var fiber = Fiber.new {
  System.print("in fiber")
  Fiber.yield("result")
}

System.print("outer %(fiber.call()) string")
// expect: in fiber
// expect: outer result string

test/language/interpolation/unterminated.wren (new file, 2 lines)
@@ -0,0 +1,2 @@
" %(
// expect error

test/language/interpolation/unterminated_expression.wren (new file, 2 lines)
@@ -0,0 +1,2 @@
// expect error line 2
" %(123"
@@ -5,7 +5,7 @@ class Foo {
  -(other) { "infix - " + other }
  *(other) { "infix * " + other }
  /(other) { "infix / " + other }
  %(other) { "infix % " + other }
  %(other) { "infix \% " + other }
  <(other) { "infix < " + other }
  >(other) { "infix > " + other }
  <=(other) { "infix <= " + other }
@@ -2,8 +2,8 @@
System.print("start a")

var A = "a value"
System.print("a defined " + A)
System.print("a defined %(A)")
import "b" for B
System.print("a imported " + B)
System.print("a imported %(B)")

System.print("end a")
System.print("end a")

@@ -2,8 +2,8 @@
System.print("start b")

var B = "b value"
System.print("b defined " + B)
System.print("b defined %(B)")
import "a" for A
System.print("b imported " + A)
System.print("b imported %(A)")

System.print("end b")
@@ -2,6 +2,7 @@
System.print("\"") // expect: "
System.print("\\") // expect: \
System.print("(\n)") // expect: (
// expect: )
// expect: )
System.print("\%") // expect: %

// TODO: Non-printing escapes like \t.
test/limit/interpolation_nesting.wren (new file, 1 line)
@@ -0,0 +1 @@
System.print("0 %("1 %("2 %("3 %("4 %("5 %("6 %("7 %(8)")")")")")")")") // expect: 0 1 2 3 4 5 6 7 8

test/limit/too_much_interpolation_nesting.wren (new file, 1 line)
@@ -0,0 +1 @@
System.print("0 %("1 %("2 %("3 %("4 %("5 %("6 %("7 %("8 %(9)")")")")")")")")") // expect error
@@ -22,6 +22,7 @@ static const char* {1}ModuleSource =
def wren_to_c_string(input_path, wren_source_lines, module):
    wren_source = ""
    for line in wren_source_lines:
        line = line.replace("\\", "\\\\")
        line = line.replace('"', "\\\"")
        line = line.replace("\n", "\\n\"")
        if wren_source: wren_source += "\n"