diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml
index 4264d7a087..f785b2d991 100644
--- a/.github/workflows/check.yml
+++ b/.github/workflows/check.yml
@@ -3,7 +3,6 @@ on: [push, pull_request]
jobs:
check:
name: Check
- runs-on: ubuntu-latest
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
diff --git a/docs/themes/navy/layout/partial/all-contributors.swig b/docs/themes/navy/layout/partial/all-contributors.swig
index a00b78f466..db19cd6dff 100644
--- a/docs/themes/navy/layout/partial/all-contributors.swig
+++ b/docs/themes/navy/layout/partial/all-contributors.swig
@@ -64,6 +64,8 @@
![Slav Ivanov](https://avatars.githubusercontent.com/u/713329?v=4?s=100) |
![Daniel Rosenberg](https://avatars.githubusercontent.com/u/3889090?v=4?s=100) |
![bobgubko](https://avatars.githubusercontent.com/u/733312?v=4?s=100) |
+ ![BaNgan](https://avatars.githubusercontent.com/u/10071857?v=4?s=100) |
+ ![Mahyar Pasarzangene](https://avatars.githubusercontent.com/u/16485039?v=4?s=100) |
diff --git a/package.json b/package.json
index 2ba739faf2..dd3363fd75 100644
--- a/package.json
+++ b/package.json
@@ -24,6 +24,7 @@
"postversion": "npm run build:dist",
"build": "npm run build:dist && npm run build:docs",
"build:dist": "rollup -c rollup.config.mjs",
+ "build:cjs": "BUNDLES=cjs rollup -c rollup.config.mjs",
"build:docs": "bin/build-docs.sh"
},
"bin": {
diff --git a/src/filters/array.ts b/src/filters/array.ts
index a82b231645..6a6e35681d 100644
--- a/src/filters/array.ts
+++ b/src/filters/array.ts
@@ -89,13 +89,9 @@ export function uniq<T> (arr: T[]): T[] {
})
}
-export function sample<T> (v: T[] | string, count: number | undefined = undefined): T[] | string {
+export function sample<T> (v: T[] | string, count: number | undefined = undefined): (T | string)[] {
v = toValue(v)
if (isNil(v)) return []
- if (!isArray(v)) {
- v = stringify(v)
- return [...v].sort(() => Math.random()).slice(0, count).join('')
- }
-
+ if (!isArray(v)) v = stringify(v)
return [...v].sort(() => Math.random()).slice(0, count)
}
diff --git a/src/parser/parser.ts b/src/parser/parser.ts
index ff9d215712..7f1ff0d075 100644
--- a/src/parser/parser.ts
+++ b/src/parser/parser.ts
@@ -5,6 +5,7 @@ import { TopLevelToken, OutputToken } from '../tokens'
import { Template, Output, HTML } from '../template'
import { LiquidCache } from '../cache'
import { FS, Loader, LookupType } from '../fs'
+import { LiquidError } from '../util/error'
import type { Liquid } from '../liquid'
export class Parser {
@@ -47,6 +48,7 @@ export class Parser {
}
return new HTML(token)
} catch (e) {
+ if (e instanceof LiquidError) throw e
throw new ParseError(e as Error, token)
}
}
diff --git a/src/parser/token-kind.ts b/src/parser/token-kind.ts
index afa1f7ab3d..f091519ab4 100644
--- a/src/parser/token-kind.ts
+++ b/src/parser/token-kind.ts
@@ -11,5 +11,6 @@ export enum TokenKind {
Range = 512,
Quoted = 1024,
Operator = 2048,
+ FilteredValue = 4096,
Delimited = Tag | Output
}
diff --git a/src/parser/tokenizer.spec.ts b/src/parser/tokenizer.spec.ts
index a425b50d14..f66f803563 100644
--- a/src/parser/tokenizer.spec.ts
+++ b/src/parser/tokenizer.spec.ts
@@ -118,7 +118,7 @@ describe('Tokenizer', function () {
it('should throw when {% raw %} not closed', function () {
const html = '{%raw%} {%endraw {%raw%}'
const tokenizer = new Tokenizer(html)
- expect(() => tokenizer.readTopLevelTokens()).toThrow('raw "{%raw%} {%end..." not closed, line:1, col:8')
+ expect(() => tokenizer.readTopLevelTokens()).toThrow('raw "{%raw%} {%endraw {%raw%}" not closed, line:1, col:8')
})
it('should read output token', function () {
const html = '{{foo | date: "%Y-%m-%d"}}'
@@ -191,7 +191,7 @@ describe('Tokenizer', function () {
it('should throw if tag not closed', function () {
const html = '{% assign foo = bar {{foo}}'
const tokenizer = new Tokenizer(html)
- expect(() => tokenizer.readTopLevelTokens()).toThrow(/tag "{% assign foo..." not closed/)
+ expect(() => tokenizer.readTopLevelTokens()).toThrow('tag "{% assign foo = bar {{foo}}" not closed, line:1, col:1')
})
it('should throw if output not closed', function () {
const tokenizer = new Tokenizer('{{name}')
diff --git a/src/parser/tokenizer.ts b/src/parser/tokenizer.ts
index 45183a081b..5e69b11d71 100644
--- a/src/parser/tokenizer.ts
+++ b/src/parser/tokenizer.ts
@@ -1,5 +1,5 @@
-import { TagToken, HTMLToken, HashToken, QuotedToken, LiquidTagToken, OutputToken, ValueToken, Token, RangeToken, FilterToken, TopLevelToken, PropertyAccessToken, OperatorToken, LiteralToken, IdentifierToken, NumberToken } from '../tokens'
-import { Trie, createTrie, ellipsis, literalValues, assert, TokenizationError, TYPES, QUOTE, BLANK, IDENTIFIER } from '../util'
+import { FilteredValueToken, TagToken, HTMLToken, HashToken, QuotedToken, LiquidTagToken, OutputToken, ValueToken, Token, RangeToken, FilterToken, TopLevelToken, PropertyAccessToken, OperatorToken, LiteralToken, IdentifierToken, NumberToken } from '../tokens'
+import { Trie, createTrie, ellipsis, literalValues, TokenizationError, TYPES, QUOTE, BLANK, IDENTIFIER } from '../util'
import { Operators, Expression } from '../render'
import { NormalizedFullOptions, defaultOptions } from '../liquid-options'
import { FilterArg } from './filter-arg'
@@ -7,7 +7,7 @@ import { matchOperator } from './match-operator'
import { whiteSpaceCtrl } from './whitespace-ctrl'
export class Tokenizer {
- p = 0
+ p: number
N: number
private rawBeginAt = -1
private opTrie: Trie
@@ -15,9 +15,11 @@ export class Tokenizer {
constructor (
public input: string,
operators: Operators = defaultOptions.operators,
- public file?: string
+ public file?: string,
+ private range?: [number, number]
) {
- this.N = input.length
+ this.p = range ? range[0] : 0
+ this.N = range ? range[1] : input.length
this.opTrie = createTrie(operators)
}
@@ -46,6 +48,13 @@ export class Tokenizer {
if (end === -1) return
return new OperatorToken(this.input, this.p, (this.p = end), this.file)
}
+ readFilteredValue (): FilteredValueToken {
+ const begin = this.p
+ const initial = this.readExpression()
+ this.assert(initial.valid(), `invalid value expression: ${this.snapshot()}`)
+ const filters = this.readFilters()
+ return new FilteredValueToken(initial, filters, this.input, begin, this.p, this.file)
+ }
readFilters (): FilterToken[] {
const filters = []
while (true) {
@@ -57,11 +66,14 @@ export class Tokenizer {
readFilter (): FilterToken | null {
this.skipBlank()
if (this.end()) return null
- assert(this.peek() === '|', () => `expected "|" before filter`)
+ this.assert(this.peek() === '|', `expected "|" before filter`)
this.p++
const begin = this.p
const name = this.readIdentifier()
- if (!name.size()) return null
+ if (!name.size()) {
+ this.assert(this.end(), `expected filter name`)
+ return null
+ }
const args = []
this.skipBlank()
if (this.peek() === ':') {
@@ -70,12 +82,12 @@ export class Tokenizer {
const arg = this.readFilterArg()
arg && args.push(arg)
this.skipBlank()
- assert(this.end() || this.peek() === ',' || this.peek() === '|', () => `unexpected character ${this.snapshot()}`)
+ this.assert(this.end() || this.peek() === ',' || this.peek() === '|', () => `unexpected character ${this.snapshot()}`)
} while (this.peek() === ',')
} else if (this.peek() === '|' || this.end()) {
// do nothing
} else {
- throw new Error('expected ":" after filter name')
+ throw this.error('expected ":" after filter name')
}
return new FilterToken(name.getText(), args, this.input, begin, this.p, this.file)
}
@@ -121,7 +133,7 @@ export class Tokenizer {
const { file, input } = this
const begin = this.p
if (this.readToDelimiter(options.tagDelimiterRight) === -1) {
- throw this.mkError(`tag ${this.snapshot(begin)} not closed`, begin)
+ throw this.error(`tag ${this.snapshot(begin)} not closed`, begin)
}
const token = new TagToken(input, begin, this.p, options, file)
if (token.name === 'raw') this.rawBeginAt = begin
@@ -145,7 +157,7 @@ export class Tokenizer {
const { outputDelimiterRight } = options
const begin = this.p
if (this.readToDelimiter(outputDelimiterRight) === -1) {
- throw this.mkError(`output ${this.snapshot(begin)} not closed`, begin)
+ throw this.error(`output ${this.snapshot(begin)} not closed`, begin)
}
return new OutputToken(input, begin, this.p, options, file)
}
@@ -174,32 +186,38 @@ export class Tokenizer {
this.p++
}
}
- throw this.mkError(`raw ${this.snapshot(this.rawBeginAt)} not closed`, begin)
+ throw this.error(`raw ${this.snapshot(this.rawBeginAt)} not closed`, begin)
}
readLiquidTagTokens (options: NormalizedFullOptions = defaultOptions): LiquidTagToken[] {
const tokens: LiquidTagToken[] = []
while (this.p < this.N) {
const token = this.readLiquidTagToken(options)
- if (token.name) tokens.push(token)
+ token && tokens.push(token)
}
return tokens
}
- readLiquidTagToken (options: NormalizedFullOptions): LiquidTagToken {
- const { file, input } = this
+ readLiquidTagToken (options: NormalizedFullOptions): LiquidTagToken | undefined {
+ this.skipBlank()
+ if (this.end()) return
+
const begin = this.p
- let end = this.N
- if (this.readToDelimiter('\n') !== -1) end = this.p
- return new LiquidTagToken(input, begin, end, options, file)
+ this.readToDelimiter('\n')
+ const end = this.p
+ return new LiquidTagToken(this.input, begin, end, options, this.file)
+ }
+
+ error (msg: string, pos: number = this.p) {
+ return new TokenizationError(msg, new IdentifierToken(this.input, pos, this.N, this.file))
}
- mkError (msg: string, begin: number) {
- return new TokenizationError(msg, new IdentifierToken(this.input, begin, this.N, this.file))
+ assert (pred: unknown, msg: string | (() => string), pos?: number) {
+ if (!pred) throw this.error(typeof msg === 'function' ? msg() : msg, pos)
}
snapshot (begin: number = this.p) {
- return JSON.stringify(ellipsis(this.input.slice(begin), 16))
+ return JSON.stringify(ellipsis(this.input.slice(begin, this.N), 32))
}
/**
@@ -212,7 +230,7 @@ export class Tokenizer {
readIdentifier (): IdentifierToken {
this.skipBlank()
const begin = this.p
- while (this.peekType() & IDENTIFIER) ++this.p
+ while (!this.end() && this.peekType() & IDENTIFIER) ++this.p
return new IdentifierToken(this.input, begin, this.p, this.file)
}
@@ -250,7 +268,7 @@ export class Tokenizer {
}
remaining () {
- return this.input.slice(this.p)
+ return this.input.slice(this.p, this.N)
}
advance (i = 1) {
@@ -323,7 +341,7 @@ export class Tokenizer {
readValueOrThrow (): ValueToken {
const value = this.readValue()
- assert(value, () => `unexpected token ${this.snapshot()}, value expected`)
+ this.assert(value, () => `unexpected token ${this.snapshot()}, value expected`)
return value!
}
@@ -372,8 +390,8 @@ export class Tokenizer {
return TYPES[this.input.charCodeAt(this.p + n)]
}
- peek (n = 0) {
- return this.input[this.p + n]
+ peek (n = 0): string {
+ return this.p + n >= this.N ? '' : this.input[this.p + n]
}
skipBlank () {
diff --git a/src/render/expression.ts b/src/render/expression.ts
index 4807e3e307..663be1f86d 100644
--- a/src/render/expression.ts
+++ b/src/render/expression.ts
@@ -30,6 +30,9 @@ export class Expression {
}
return operands[0]
}
+ public valid () {
+ return !!this.postfix.length
+ }
}
export function * evalToken (token: Token | undefined, ctx: Context, lenient = false): IterableIterator<unknown> {
diff --git a/src/tags/assign.ts b/src/tags/assign.ts
index c3416e538d..ba4d4b3eb9 100644
--- a/src/tags/assign.ts
+++ b/src/tags/assign.ts
@@ -1,16 +1,18 @@
-import { Value, assert, Tokenizer, Liquid, TopLevelToken, TagToken, Context, Tag } from '..'
+import { Value, Liquid, TopLevelToken, TagToken, Context, Tag } from '..'
export default class extends Tag {
private key: string
private value: Value
constructor (token: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(token, remainTokens, liquid)
- const tokenizer = new Tokenizer(token.args, liquid.options.operators)
- this.key = tokenizer.readIdentifier().content
- tokenizer.skipBlank()
- assert(tokenizer.peek() === '=', () => `illegal token ${token.getText()}`)
- tokenizer.advance()
- this.value = new Value(tokenizer.remaining(), this.liquid)
+ this.key = this.tokenizer.readIdentifier().content
+ this.tokenizer.assert(this.key, 'expected variable name')
+
+ this.tokenizer.skipBlank()
+ this.tokenizer.assert(this.tokenizer.peek() === '=', 'expected "="')
+
+ this.tokenizer.advance()
+ this.value = new Value(this.tokenizer.readFilteredValue(), this.liquid)
}
* render (ctx: Context): Generator {
ctx.bottom()[this.key] = yield this.value.value(ctx, this.liquid.options.lenientIf)
diff --git a/src/tags/capture.ts b/src/tags/capture.ts
index 5abf02720e..deb493c00c 100644
--- a/src/tags/capture.ts
+++ b/src/tags/capture.ts
@@ -1,4 +1,4 @@
-import { Liquid, Tag, Tokenizer, assert, Template, Context, TagToken, TopLevelToken } from '..'
+import { Liquid, Tag, Template, Context, TagToken, TopLevelToken } from '..'
import { evalQuotedToken } from '../render'
import { isTagToken } from '../util'
@@ -7,9 +7,7 @@ export default class extends Tag {
templates: Template[] = []
constructor (tagToken: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(tagToken, remainTokens, liquid)
- const tokenizer = new Tokenizer(tagToken.args, this.liquid.options.operators)
- this.variable = readVariableName(tokenizer)!
- assert(this.variable, () => `${tagToken.args} not valid identifier`)
+ this.variable = this.readVariableName()
while (remainTokens.length) {
const token = remainTokens.shift()!
@@ -23,11 +21,11 @@ export default class extends Tag {
const html = yield r.renderTemplates(this.templates, ctx)
ctx.bottom()[this.variable] = html
}
-}
-
-function readVariableName (tokenizer: Tokenizer) {
- const word = tokenizer.readIdentifier().content
- if (word) return word
- const quoted = tokenizer.readQuoted()
- if (quoted) return evalQuotedToken(quoted)
+ private readVariableName () {
+ const word = this.tokenizer.readIdentifier().content
+ if (word) return word
+ const quoted = this.tokenizer.readQuoted()
+ if (quoted) return evalQuotedToken(quoted)
+ throw this.tokenizer.error('invalid capture name')
+ }
}
diff --git a/src/tags/case.ts b/src/tags/case.ts
index 191fddabec..b55df24655 100644
--- a/src/tags/case.ts
+++ b/src/tags/case.ts
@@ -1,4 +1,4 @@
-import { ValueToken, Liquid, Tokenizer, toValue, evalToken, Value, Emitter, TagToken, TopLevelToken, Context, Template, Tag, ParseStream } from '..'
+import { ValueToken, Liquid, toValue, evalToken, Value, Emitter, TagToken, TopLevelToken, Context, Template, Tag, ParseStream } from '..'
export default class extends Tag {
value: Value
@@ -6,7 +6,7 @@ export default class extends Tag {
elseTemplates: Template[] = []
constructor (tagToken: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(tagToken, remainTokens, liquid)
- this.value = new Value(tagToken.args, this.liquid)
+ this.value = new Value(this.tokenizer.readFilteredValue(), this.liquid)
this.elseTemplates = []
let p: Template[] = []
@@ -14,11 +14,10 @@ export default class extends Tag {
.on('tag:when', (token: TagToken) => {
p = []
- const tokenizer = new Tokenizer(token.args, this.liquid.options.operators)
const values: ValueToken[] = []
- while (!tokenizer.end()) {
- values.push(tokenizer.readValueOrThrow())
- tokenizer.readTo(',')
+ while (!token.tokenizer.end()) {
+ values.push(token.tokenizer.readValueOrThrow())
+ token.tokenizer.readTo(',')
}
this.branches.push({
values,
diff --git a/src/tags/cycle.ts b/src/tags/cycle.ts
index 396d31f7c7..63631b8094 100644
--- a/src/tags/cycle.ts
+++ b/src/tags/cycle.ts
@@ -1,27 +1,26 @@
-import { Tokenizer, assert, TopLevelToken, Liquid, ValueToken, evalToken, Emitter, TagToken, Context, Tag } from '..'
+import { TopLevelToken, Liquid, ValueToken, evalToken, Emitter, TagToken, Context, Tag } from '..'
export default class extends Tag {
private candidates: ValueToken[] = []
private group?: ValueToken
- constructor (tagToken: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
- super(tagToken, remainTokens, liquid)
- const tokenizer = new Tokenizer(tagToken.args, this.liquid.options.operators)
- const group = tokenizer.readValue()
- tokenizer.skipBlank()
+ constructor (token: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
+ super(token, remainTokens, liquid)
+ const group = this.tokenizer.readValue()
+ this.tokenizer.skipBlank()
if (group) {
- if (tokenizer.peek() === ':') {
+ if (this.tokenizer.peek() === ':') {
this.group = group
- tokenizer.advance()
+ this.tokenizer.advance()
} else this.candidates.push(group)
}
- while (!tokenizer.end()) {
- const value = tokenizer.readValue()
+ while (!this.tokenizer.end()) {
+ const value = this.tokenizer.readValue()
if (value) this.candidates.push(value)
- tokenizer.readTo(',')
+ this.tokenizer.readTo(',')
}
- assert(this.candidates.length, () => `empty candidates: ${tagToken.getText()}`)
+ this.tokenizer.assert(this.candidates.length, () => `empty candidates: "${token.getText()}"`)
}
* render (ctx: Context, emitter: Emitter): Generator {
diff --git a/src/tags/decrement.ts b/src/tags/decrement.ts
index b889273cba..0bdec57e3a 100644
--- a/src/tags/decrement.ts
+++ b/src/tags/decrement.ts
@@ -1,12 +1,11 @@
-import { Tag, Liquid, TopLevelToken, Tokenizer, Emitter, TagToken, Context } from '..'
+import { Tag, Liquid, TopLevelToken, Emitter, TagToken, Context } from '..'
import { isNumber, stringify } from '../util'
export default class extends Tag {
private variable: string
constructor (token: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(token, remainTokens, liquid)
- const tokenizer = new Tokenizer(token.args, this.liquid.options.operators)
- this.variable = tokenizer.readIdentifier().content
+ this.variable = this.tokenizer.readIdentifier().content
}
render (context: Context, emitter: Emitter) {
const scope = context.environments
diff --git a/src/tags/echo.ts b/src/tags/echo.ts
index 844014ba03..41423a3494 100644
--- a/src/tags/echo.ts
+++ b/src/tags/echo.ts
@@ -1,12 +1,17 @@
import { Liquid, TopLevelToken, Emitter, Value, TagToken, Context, Tag } from '..'
export default class extends Tag {
- private value: Value
+ private value?: Value
+
constructor (token: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(token, remainTokens, liquid)
- this.value = new Value(token.args, this.liquid)
+ this.tokenizer.skipBlank()
+ if (!this.tokenizer.end()) {
+ this.value = new Value(this.tokenizer.readFilteredValue(), this.liquid)
+ }
}
* render (ctx: Context, emitter: Emitter): Generator {
+ if (!this.value) return
const val = yield this.value.value(ctx, false)
emitter.write(val)
}
diff --git a/src/tags/for.ts b/src/tags/for.ts
index 1ed0f55099..2e65ed810a 100644
--- a/src/tags/for.ts
+++ b/src/tags/for.ts
@@ -1,4 +1,4 @@
-import { Hash, ValueToken, Liquid, Tag, Tokenizer, evalToken, Emitter, TagToken, TopLevelToken, Context, Template, ParseStream } from '..'
+import { Hash, ValueToken, Liquid, Tag, evalToken, Emitter, TagToken, TopLevelToken, Context, Template, ParseStream } from '..'
import { toEnumerable } from '../util/collection'
import { ForloopDrop } from '../drop/forloop-drop'
@@ -15,17 +15,16 @@ export default class extends Tag {
constructor (token: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(token, remainTokens, liquid)
- const tokenizer = new Tokenizer(token.args, this.liquid.options.operators)
- const variable = tokenizer.readIdentifier()
- const inStr = tokenizer.readIdentifier()
- const collection = tokenizer.readValue()
+ const variable = this.tokenizer.readIdentifier()
+ const inStr = this.tokenizer.readIdentifier()
+ const collection = this.tokenizer.readValue()
if (!variable.size() || inStr.content !== 'in' || !collection) {
throw new Error(`illegal tag: ${token.getText()}`)
}
this.variable = variable.content
this.collection = collection
- this.hash = new Hash(tokenizer.remaining())
+ this.hash = new Hash(this.tokenizer.remaining())
this.templates = []
this.elseTemplates = []
diff --git a/src/tags/include.ts b/src/tags/include.ts
index 207c8ffc28..d433199f7f 100644
--- a/src/tags/include.ts
+++ b/src/tags/include.ts
@@ -1,4 +1,4 @@
-import { Template, ValueToken, TopLevelToken, Liquid, Tag, assert, Tokenizer, evalToken, Hash, Emitter, TagToken, Context } from '..'
+import { Template, ValueToken, TopLevelToken, Liquid, Tag, assert, evalToken, Hash, Emitter, TagToken, Context } from '..'
import { BlockMode, Scope } from '../context'
import { parseFilePath, renderFilePath } from './render'
@@ -7,8 +7,7 @@ export default class extends Tag {
private hash: Hash
constructor (token: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(token, remainTokens, liquid)
- const args = token.args
- const tokenizer = new Tokenizer(args, this.liquid.options.operators)
+ const { tokenizer } = token
this['file'] = parseFilePath(tokenizer, this.liquid)
this['currentFile'] = token.file
@@ -27,7 +26,7 @@ export default class extends Tag {
const { liquid, hash, withVar } = this
const { renderer } = liquid
const filepath = (yield renderFilePath(this['file'], ctx, liquid)) as string
- assert(filepath, () => `illegal filename "${filepath}"`)
+ assert(filepath, () => `illegal file path "${filepath}"`)
const saved = ctx.saveRegister('blocks', 'blockMode')
ctx.setRegister('blocks', {})
diff --git a/src/tags/increment.ts b/src/tags/increment.ts
index 1748db2f7c..16a74a46d6 100644
--- a/src/tags/increment.ts
+++ b/src/tags/increment.ts
@@ -1,12 +1,11 @@
import { isNumber, stringify } from '../util'
-import { Tag, Liquid, TopLevelToken, Tokenizer, Emitter, TagToken, Context } from '..'
+import { Tag, Liquid, TopLevelToken, Emitter, TagToken, Context } from '..'
export default class extends Tag {
private variable: string
constructor (token: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(token, remainTokens, liquid)
- const tokenizer = new Tokenizer(token.args, this.liquid.options.operators)
- this.variable = tokenizer.readIdentifier().content
+ this.variable = this.tokenizer.readIdentifier().content
}
render (context: Context, emitter: Emitter) {
const scope = context.environments
diff --git a/src/tags/layout.ts b/src/tags/layout.ts
index 7ffc04da10..d0b612dd22 100644
--- a/src/tags/layout.ts
+++ b/src/tags/layout.ts
@@ -1,4 +1,4 @@
-import { Scope, Template, Liquid, Tag, assert, Tokenizer, Emitter, Hash, TagToken, TopLevelToken, Context } from '..'
+import { Scope, Template, Liquid, Tag, assert, Emitter, Hash, TagToken, TopLevelToken, Context } from '..'
import { BlockMode } from '../context'
import { parseFilePath, renderFilePath, ParsedFileName } from './render'
import { BlankDrop } from '../drop'
@@ -9,10 +9,9 @@ export default class extends Tag {
file?: ParsedFileName
constructor (token: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(token, remainTokens, liquid)
- const tokenizer = new Tokenizer(token.args, this.liquid.options.operators)
- this.file = parseFilePath(tokenizer, this.liquid)
+ this.file = parseFilePath(this.tokenizer, this.liquid)
this['currentFile'] = token.file
- this.args = new Hash(tokenizer.remaining())
+ this.args = new Hash(this.tokenizer.remaining())
this.templates = this.liquid.parser.parseTokens(remainTokens)
}
* render (ctx: Context, emitter: Emitter): Generator {
@@ -24,7 +23,7 @@ export default class extends Tag {
return
}
const filepath = (yield renderFilePath(this.file, ctx, liquid)) as string
- assert(filepath, () => `illegal filename "${filepath}"`)
+ assert(filepath, () => `illegal file path "${filepath}"`)
const templates = (yield liquid._parseLayoutFile(filepath, ctx.sync, this['currentFile'])) as Template[]
// render remaining contents and store rendered results
diff --git a/src/tags/liquid.ts b/src/tags/liquid.ts
index 18a4ff7dfb..7479591464 100644
--- a/src/tags/liquid.ts
+++ b/src/tags/liquid.ts
@@ -1,11 +1,10 @@
-import { Template, Tokenizer, Emitter, Liquid, TopLevelToken, TagToken, Context, Tag } from '..'
+import { Template, Emitter, Liquid, TopLevelToken, TagToken, Context, Tag } from '..'
export default class extends Tag {
templates: Template[]
constructor (token: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(token, remainTokens, liquid)
- const tokenizer = new Tokenizer(token.args, this.liquid.options.operators)
- const tokens = tokenizer.readLiquidTagTokens(this.liquid.options)
+ const tokens = this.tokenizer.readLiquidTagTokens(this.liquid.options)
this.templates = this.liquid.parser.parseTokens(tokens)
}
* render (ctx: Context, emitter: Emitter): Generator {
diff --git a/src/tags/render.ts b/src/tags/render.ts
index 2e54d653ed..395d7dbe3c 100644
--- a/src/tags/render.ts
+++ b/src/tags/render.ts
@@ -11,8 +11,7 @@ export default class extends Tag {
private hash: Hash
constructor (token: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(token, remainTokens, liquid)
- const args = token.args
- const tokenizer = new Tokenizer(args, this.liquid.options.operators)
+ const tokenizer = this.tokenizer
this.file = parseFilePath(tokenizer, this.liquid)
this.currentFile = token.file
while (!tokenizer.end()) {
@@ -50,7 +49,7 @@ export default class extends Tag {
* render (ctx: Context, emitter: Emitter): Generator {
const { liquid, hash } = this
const filepath = (yield renderFilePath(this['file'], ctx, liquid)) as string
- assert(filepath, () => `illegal filename "${filepath}"`)
+ assert(filepath, () => `illegal file path "${filepath}"`)
const childCtx = new Context({}, ctx.opts, { sync: ctx.sync, globals: ctx.globals, strictVariables: ctx.strictVariables })
const scope = childCtx.bottom()
@@ -86,8 +85,8 @@ export default class extends Tag {
export function parseFilePath (tokenizer: Tokenizer, liquid: Liquid): ParsedFileName {
if (liquid.options.dynamicPartials) {
const file = tokenizer.readValue()
- if (file === undefined) throw new TypeError(`illegal argument "${tokenizer.input}"`)
- if (file.getText() === 'none') return
+ tokenizer.assert(file, 'illegal file path')
+ if (file!.getText() === 'none') return
if (TypeGuards.isQuotedToken(file)) {
// for filenames like "files/{{file}}", eval as liquid template
const templates = liquid.parse(evalQuotedToken(file))
diff --git a/src/tags/tablerow.ts b/src/tags/tablerow.ts
index c69fe8d9f1..f0f6062ebf 100644
--- a/src/tags/tablerow.ts
+++ b/src/tags/tablerow.ts
@@ -1,7 +1,6 @@
import { toEnumerable } from '../util/collection'
import { ValueToken, Liquid, Tag, evalToken, Emitter, Hash, TagToken, TopLevelToken, Context, Template, ParseStream } from '..'
import { TablerowloopDrop } from '../drop/tablerowloop-drop'
-import { Tokenizer } from '../parser/tokenizer'
export default class extends Tag {
variable: string
@@ -10,20 +9,18 @@ export default class extends Tag {
collection: ValueToken
constructor (tagToken: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(tagToken, remainTokens, liquid)
- const tokenizer = new Tokenizer(tagToken.args, this.liquid.options.operators)
+ const variable = this.tokenizer.readIdentifier()
+ this.tokenizer.skipBlank()
- const variable = tokenizer.readIdentifier()
- tokenizer.skipBlank()
-
- const predicate = tokenizer.readIdentifier()
- const collectionToken = tokenizer.readValue()
+ const predicate = this.tokenizer.readIdentifier()
+ const collectionToken = this.tokenizer.readValue()
if (predicate.content !== 'in' || !collectionToken) {
throw new Error(`illegal tag: ${tagToken.getText()}`)
}
this.variable = variable.content
this.collection = collectionToken
- this.args = new Hash(tokenizer.remaining())
+ this.args = new Hash(this.tokenizer.remaining())
this.templates = []
let p
diff --git a/src/template/output.spec.ts b/src/template/output.spec.ts
index 421477fe3a..fa8ac03e0b 100644
--- a/src/template/output.spec.ts
+++ b/src/template/output.spec.ts
@@ -7,31 +7,32 @@ import { defaultOptions } from '../liquid-options'
describe('Output', function () {
const emitter: any = { write: (html: string) => (emitter.html += html), html: '' }
const liquid = { options: {} } as any
+ const token = { content: 'obj', input: 'obj' } as OutputToken
beforeEach(() => { emitter.html = '' })
it('should stringify objects', async function () {
const scope = new Context({
- foo: { obj: { arr: ['a', 2] } }
+ obj: { foo: { arr: ['a', 2] } }
})
- const output = new Output({ content: 'foo' } as OutputToken, liquid)
+ const output = new Output(token, liquid)
await toPromise(output.render(scope, emitter))
return expect(emitter.html).toBe('[object Object]')
})
it('should skip function property', async function () {
const scope = new Context({ obj: { foo: 'foo', bar: (x: any) => x } })
- const output = new Output({ content: 'obj' } as OutputToken, liquid)
+ const output = new Output(token, liquid)
await toPromise(output.render(scope, emitter))
return expect(emitter.html).toBe('[object Object]')
})
it('should respect to .toString()', async () => {
const scope = new Context({ obj: { toString: () => 'FOO' } })
- const output = new Output({ content: 'obj' } as OutputToken, liquid)
+ const output = new Output(token, liquid)
await toPromise(output.render(scope, emitter))
return expect(emitter.html).toBe('FOO')
})
it('should respect to .toString()', async () => {
const scope = new Context({ obj: { toString: () => 'FOO' } })
- const output = new Output({ content: 'obj' } as OutputToken, liquid)
+ const output = new Output(token, liquid)
await toPromise(output.render(scope, emitter))
return expect(emitter.html).toBe('FOO')
})
@@ -47,6 +48,7 @@ describe('Output', function () {
html: '',
keepOutputType: true
}
+ const token = { content: 'foo', input: 'foo' } as OutputToken
beforeEach(() => { emitter.html = '' })
@@ -54,7 +56,7 @@ describe('Output', function () {
const scope = new Context({
foo: 42
}, { ...defaultOptions, keepOutputType: true })
- const output = new Output({ content: 'foo' } as OutputToken, liquid)
+ const output = new Output(token, liquid)
await toPromise(output.render(scope, emitter))
return expect(emitter.html).toBe(42)
})
@@ -62,7 +64,7 @@ describe('Output', function () {
const scope = new Context({
foo: true
}, { ...defaultOptions, keepOutputType: true })
- const output = new Output({ content: 'foo' } as OutputToken, liquid)
+ const output = new Output(token, liquid)
await toPromise(output.render(scope, emitter))
return expect(emitter.html).toBe(true)
})
@@ -70,7 +72,7 @@ describe('Output', function () {
const scope = new Context({
foo: 'test'
}, { ...defaultOptions, keepOutputType: true })
- const output = new Output({ content: 'foo' } as OutputToken, liquid)
+ const output = new Output(token, liquid)
await toPromise(output.render(scope, emitter))
return expect(emitter.html).toBe('test')
})
@@ -78,7 +80,7 @@ describe('Output', function () {
const scope = new Context({
foo: { a: { b: 42 } }
}, { ...defaultOptions, keepOutputType: true })
- const output = new Output({ content: 'foo' } as OutputToken, liquid)
+ const output = new Output(token, liquid)
await toPromise(output.render(scope, emitter))
return expect(emitter.html).toEqual({ a: { b: 42 } })
})
diff --git a/src/template/output.ts b/src/template/output.ts
index 6df047e76e..8fffd49006 100644
--- a/src/template/output.ts
+++ b/src/template/output.ts
@@ -3,6 +3,7 @@ import { Template, TemplateImpl } from '../template'
import { Context } from '../context/context'
import { Emitter } from '../emitters/emitter'
import { OutputToken } from '../tokens/output-token'
+import { Tokenizer } from '../parser'
import { Liquid } from '../liquid'
import { Filter } from './filter'
@@ -10,7 +11,8 @@ export class Output extends TemplateImpl<OutputToken> implements Template {
value: Value
public constructor (token: OutputToken, liquid: Liquid) {
super(token)
- this.value = new Value(token.content, liquid)
+ const tokenizer = new Tokenizer(token.input, liquid.options.operators, token.file, token.contentRange)
+ this.value = new Value(tokenizer.readFilteredValue(), liquid)
const filters = this.value.filters
const outputEscape = liquid.options.outputEscape
if (!filters[filters.length - 1]?.raw && outputEscape) {
diff --git a/src/template/tag.ts b/src/template/tag.ts
index b74ccdb1a7..6f51752499 100644
--- a/src/template/tag.ts
+++ b/src/template/tag.ts
@@ -1,5 +1,6 @@
import { TemplateImpl } from './template-impl'
import type { Emitter } from '../emitters/emitter'
+import type { Tokenizer } from '../parser'
import type { Context } from '../context/context'
import type { TopLevelToken, TagToken } from '../tokens'
import type { Template } from './template'
@@ -10,11 +11,13 @@ export type TagRenderReturn = Generator<unknown, unknown, unknown> | Promise<unknown> | unknown
export abstract class Tag extends TemplateImpl<TagToken> implements Template {
public name: string
public liquid: Liquid
+ protected tokenizer: Tokenizer
public constructor (token: TagToken, remainTokens: TopLevelToken[], liquid: Liquid) {
super(token)
this.name = token.name
this.liquid = liquid
+ this.tokenizer = token.tokenizer
}
public abstract render (ctx: Context, emitter: Emitter): TagRenderReturn;
}
diff --git a/src/template/value.ts b/src/template/value.ts
index e0f6da1252..3658d244e9 100644
--- a/src/template/value.ts
+++ b/src/template/value.ts
@@ -2,6 +2,7 @@ import { Filter } from './filter'
import { Expression } from '../render'
import { Tokenizer } from '../parser'
import { assert } from '../util'
+import type { FilteredValueToken } from '../tokens'
import type { Liquid } from '../liquid'
import type { Context } from '../context'
@@ -12,10 +13,12 @@ export class Value {
/**
* @param str the value to be valuated, eg.: "foobar" | truncate: 3
*/
- public constructor (str: string, liquid: Liquid) {
- const tokenizer = new Tokenizer(str, liquid.options.operators)
- this.initial = tokenizer.readExpression()
- this.filters = tokenizer.readFilters().map(({ name, args }) => new Filter(name, this.getFilter(liquid, name), args, liquid))
+ public constructor (input: string | FilteredValueToken, liquid: Liquid) {
+ const token: FilteredValueToken = typeof input === 'string'
+ ? new Tokenizer(input, liquid.options.operators).readFilteredValue()
+ : input
+ this.initial = token.initial
+ this.filters = token.filters.map(({ name, args }) => new Filter(name, this.getFilter(liquid, name), args, liquid))
}
public * value (ctx: Context, lenient?: boolean): Generator {
lenient = lenient || (ctx.opts.lenientIf && this.filters.length > 0 && this.filters[0].name === 'default')
diff --git a/src/tokens/delimited-token.ts b/src/tokens/delimited-token.ts
index d960c64283..bd9885eb89 100644
--- a/src/tokens/delimited-token.ts
+++ b/src/tokens/delimited-token.ts
@@ -1,14 +1,14 @@
import { Token } from './token'
import { TokenKind } from '../parser'
-import { last } from '../util'
+import { TYPES, BLANK } from '../util'
export abstract class DelimitedToken extends Token {
public trimLeft = false
public trimRight = false
- public content: string
+ public contentRange: [number, number]
public constructor (
kind: TokenKind,
- content: string,
+ [contentBegin, contentEnd]: [number, number],
input: string,
begin: number,
end: number,
@@ -17,16 +17,19 @@ export abstract class DelimitedToken extends Token {
file?: string
) {
super(kind, input, begin, end, file)
- this.content = this.getText()
- const tl = content[0] === '-'
- const tr = last(content) === '-'
- this.content = content
- .slice(
- tl ? 1 : 0,
- tr ? -1 : content.length
- )
- .trim()
+ const tl = input[contentBegin] === '-'
+ const tr = input[contentEnd - 1] === '-'
+
+ let l = tl ? contentBegin + 1 : contentBegin
+ let r = tr ? contentEnd - 1 : contentEnd
+ while (l < r && (TYPES[input.charCodeAt(l)] & BLANK)) l++
+ while (r > l && (TYPES[input.charCodeAt(r - 1)] & BLANK)) r--
+
+ this.contentRange = [l, r]
this.trimLeft = tl || trimLeft
this.trimRight = tr || trimRight
}
+ get content () {
+ return this.input.slice(this.contentRange[0], this.contentRange[1])
+ }
}
diff --git a/src/tokens/filtered-value-token.ts b/src/tokens/filtered-value-token.ts
new file mode 100644
index 0000000000..3653c285e4
--- /dev/null
+++ b/src/tokens/filtered-value-token.ts
@@ -0,0 +1,22 @@
+import { Token } from './token'
+import { FilterToken } from './filter-token'
+import { TokenKind } from '../parser'
+import { Expression } from '../render'
+
+/**
+ * value expression with optional filters
+ * e.g.
+ * {% assign foo="bar" | append: "coo" %}
+ */
+export class FilteredValueToken extends Token {
+ constructor (
+ public initial: Expression,
+ public filters: FilterToken[],
+ public input: string,
+ public begin: number,
+ public end: number,
+ public file?: string
+ ) {
+ super(TokenKind.FilteredValue, input, begin, end, file)
+ }
+}
diff --git a/src/tokens/index.ts b/src/tokens/index.ts
index d117dcf8a7..d1feea6446 100644
--- a/src/tokens/index.ts
+++ b/src/tokens/index.ts
@@ -15,3 +15,4 @@ export * from './range-token'
export * from './value-token'
export * from './liquid-tag-token'
export * from './delimited-token'
+export * from './filtered-value-token'
diff --git a/src/tokens/liquid-tag-token.ts b/src/tokens/liquid-tag-token.ts
index d8dc7c2d71..e97be4e8ac 100644
--- a/src/tokens/liquid-tag-token.ts
+++ b/src/tokens/liquid-tag-token.ts
@@ -1,11 +1,14 @@
import { DelimitedToken } from './delimited-token'
-import { TokenizationError } from '../util'
import { NormalizedFullOptions } from '../liquid-options'
import { Tokenizer, TokenKind } from '../parser'
+/**
+ * LiquidTagToken is different from TagToken by not having delimiters `{%` or `%}`
+ */
export class LiquidTagToken extends DelimitedToken {
public name: string
public args: string
+ public tokenizer: Tokenizer
public constructor (
input: string,
begin: number,
@@ -13,20 +16,13 @@ export class LiquidTagToken extends DelimitedToken {
options: NormalizedFullOptions,
file?: string
) {
- const value = input.slice(begin, end)
- super(TokenKind.Tag, value, input, begin, end, false, false, file)
+ super(TokenKind.Tag, [begin, end], input, begin, end, false, false, file)
- if (!/\S/.test(value)) {
- // A line that contains only whitespace.
- this.name = ''
- this.args = ''
- } else {
- const tokenizer = new Tokenizer(this.content, options.operators)
- this.name = tokenizer.readTagName()
- if (!this.name) throw new TokenizationError(`illegal liquid tag syntax`, this)
+ this.tokenizer = new Tokenizer(input, options.operators, file, this.contentRange)
+ this.name = this.tokenizer.readTagName()
+ this.tokenizer.assert(this.name, 'illegal liquid tag syntax')
- tokenizer.skipBlank()
- this.args = tokenizer.remaining()
- }
+ this.tokenizer.skipBlank()
+ this.args = this.tokenizer.remaining()
}
}
diff --git a/src/tokens/output-token.ts b/src/tokens/output-token.ts
index da2d7eef96..91f6dc0543 100644
--- a/src/tokens/output-token.ts
+++ b/src/tokens/output-token.ts
@@ -11,7 +11,7 @@ export class OutputToken extends DelimitedToken {
file?: string
) {
const { trimOutputLeft, trimOutputRight, outputDelimiterLeft, outputDelimiterRight } = options
- const value = input.slice(begin + outputDelimiterLeft.length, end - outputDelimiterRight.length)
- super(TokenKind.Output, value, input, begin, end, trimOutputLeft, trimOutputRight, file)
+ const valueRange: [number, number] = [begin + outputDelimiterLeft.length, end - outputDelimiterRight.length]
+ super(TokenKind.Output, valueRange, input, begin, end, trimOutputLeft, trimOutputRight, file)
}
}
diff --git a/src/tokens/tag-token.ts b/src/tokens/tag-token.ts
index 6b23728805..2cb11afc57 100644
--- a/src/tokens/tag-token.ts
+++ b/src/tokens/tag-token.ts
@@ -1,11 +1,10 @@
import { DelimitedToken } from './delimited-token'
-import { TokenizationError } from '../util/error'
import { Tokenizer, TokenKind } from '../parser'
import type { NormalizedFullOptions } from '../liquid-options'
export class TagToken extends DelimitedToken {
public name: string
- public args: string
+ public tokenizer: Tokenizer
public constructor (
input: string,
begin: number,
@@ -14,14 +13,15 @@ export class TagToken extends DelimitedToken {
file?: string
) {
const { trimTagLeft, trimTagRight, tagDelimiterLeft, tagDelimiterRight } = options
- const value = input.slice(begin + tagDelimiterLeft.length, end - tagDelimiterRight.length)
- super(TokenKind.Tag, value, input, begin, end, trimTagLeft, trimTagRight, file)
+ const [valueBegin, valueEnd] = [begin + tagDelimiterLeft.length, end - tagDelimiterRight.length]
+ super(TokenKind.Tag, [valueBegin, valueEnd], input, begin, end, trimTagLeft, trimTagRight, file)
- const tokenizer = new Tokenizer(this.content, options.operators)
- this.name = tokenizer.readTagName()
- if (!this.name) throw new TokenizationError(`illegal tag syntax`, this)
-
- tokenizer.skipBlank()
- this.args = tokenizer.remaining()
+ this.tokenizer = new Tokenizer(input, options.operators, file, this.contentRange)
+ this.name = this.tokenizer.readTagName()
+ this.tokenizer.assert(this.name, `illegal tag syntax, tag name expected`)
+ this.tokenizer.skipBlank()
+ }
+ get args (): string {
+ return this.tokenizer.input.slice(this.tokenizer.p, this.contentRange[1])
}
}
diff --git a/src/util/error.ts b/src/util/error.ts
index f92930e5ea..6a71e94bf8 100644
--- a/src/util/error.ts
+++ b/src/util/error.ts
@@ -3,27 +3,32 @@ import { Token } from '../tokens/token'
import { Template } from '../template/template'
export abstract class LiquidError extends Error {
- private token: Token
- public context: string
- private originalError: Error
- public constructor (err: Error, token: Token) {
- super(err.message)
- this.originalError = err
- this.token = token
- this.context = ''
+ private token!: Token
+ public context = ''
+ private originalError?: Error
+ public constructor (err: Error | string, token: Token) {
+ super(typeof err === 'string' ? err : err.message)
+ if (typeof err !== 'string') this.defineUnEnumerable('originalError', err)
+ this.defineUnEnumerable('token', token)
+ }
+ private defineUnEnumerable (property: string, value: unknown) {
+ Object.defineProperty(this, property, {
+ value: value,
+ enumerable: false
+ })
}
protected update () {
- const err = this.originalError
- this.context = mkContext(this.token)
- this.message = mkMessage(err.message, this.token)
+ this.defineUnEnumerable('context', mkContext(this.token))
+ this.message = mkMessage(this.message, this.token)
this.stack = this.message + '\n' + this.context +
- '\n' + this.stack + '\nFrom ' + err.stack
+ '\n' + this.stack
+ if (this.originalError) this.stack += '\nFrom ' + this.originalError.stack
}
}
export class TokenizationError extends LiquidError {
public constructor (message: string, token: Token) {
- super(new Error(message), token)
+ super(message, token)
this.name = 'TokenizationError'
super.update()
}
@@ -80,7 +85,7 @@ export class AssertionError extends Error {
}
function mkContext (token: Token) {
- const [line] = token.getPosition()
+ const [line, col] = token.getPosition()
const lines = token.input.split('\n')
const begin = Math.max(line - 2, 1)
const end = Math.min(line + 3, lines.length)
@@ -88,10 +93,17 @@ function mkContext (token: Token) {
const context = _
.range(begin, end + 1)
.map(lineNumber => {
- const indicator = (lineNumber === line) ? '>> ' : ' '
+ const rowIndicator = (lineNumber === line) ? '>> ' : ' '
const num = _.padStart(String(lineNumber), String(end).length)
- const text = lines[lineNumber - 1]
- return `${indicator}${num}| ${text}`
+ let text = `${rowIndicator}${num}| `
+
+ const colIndicator = lineNumber === line
+ ? '\n' + _.padStart('^', col + text.length)
+ : ''
+
+ text += lines[lineNumber - 1]
+ text += colIndicator
+ return text
})
.join('\n')
diff --git a/test/e2e/issues.spec.ts b/test/e2e/issues.spec.ts
index b4de1f1ed8..268911c278 100644
--- a/test/e2e/issues.spec.ts
+++ b/test/e2e/issues.spec.ts
@@ -254,7 +254,7 @@ describe('Issues', function () {
})
it('#519 should throw parse error for invalid assign expression', () => {
const engine = new Liquid()
- expect(() => engine.parse('{% assign headshot = https://testurl.com/not_enclosed_in_quotes.jpg %}')).toThrow(/unexpected token at ":/)
+ expect(() => engine.parse('{% assign headshot = https://testurl.com/not_enclosed_in_quotes.jpg %}')).toThrow(/expected "|" before filter, line:1, col:27/)
})
it('#527 export Liquid Expression', () => {
const tokenizer = new Tokenizer('a > b')
diff --git a/test/integration/filters/array.spec.ts b/test/integration/filters/array.spec.ts
index f2835968d1..3afc974db2 100644
--- a/test/integration/filters/array.spec.ts
+++ b/test/integration/filters/array.spec.ts
@@ -22,7 +22,7 @@ describe('filters/array', function () {
it('should throw when comma missing', async () => {
const src = '{% assign beatles = "John, Paul, George, Ringo" | split: ", " %}' +
'{{ beatles | join " and " }}'
- return expect(render(src)).rejects.toThrow('unexpected token at "\\" and \\"", line:1, col:65')
+ return expect(render(src)).rejects.toThrow('expected ":" after filter name, line:1, col:83')
})
})
describe('last', () => {
@@ -138,6 +138,14 @@ describe('filters/array', function () {
'{{ "hello,world" | split: "," | sample: 1 | size }}',
'1'
))
+ it('should sample nil value', () => test(
+ '{{ nil | sample: 2 }}',
+ ''
+ ))
+ it('should sample string characters', () => test(
+ '{{ "aaa" | sample: 2 }}',
+ 'aa'
+ ))
})
describe('size', function () {
it('should return string length', () => test(
diff --git a/test/integration/tags/assign.spec.ts b/test/integration/tags/assign.spec.ts
index 2ff093d990..6ece66f45f 100644
--- a/test/integration/tags/assign.spec.ts
+++ b/test/integration/tags/assign.spec.ts
@@ -1,12 +1,12 @@
import { Liquid } from '../../../src/liquid'
-import { ParseError } from '../../../src'
+import { TokenizationError } from '../../../src'
describe('tags/assign', function () {
const liquid = new Liquid()
it('should throw when variable name illegal', function () {
const src = '{% assign / %}'
const ctx = {}
- return expect(liquid.parseAndRender(src, ctx)).rejects.toThrow(/illegal/)
+ return expect(liquid.parseAndRender(src, ctx)).rejects.toThrow(/expected variable name/)
})
it('should support assign to a string', async function () {
const src = '{% assign foo="bar" %}{{foo}}'
@@ -15,8 +15,8 @@ describe('tags/assign', function () {
})
it('should throw when variable value illegal', function () {
const src = '{% assign foo = “bar” %}'
- expect(() => liquid.parse(src)).toThrow(/unexpected token at "“bar”"/)
- expect(() => liquid.parse(src)).toThrow(ParseError)
+ expect(() => liquid.parse(src)).toThrow(/invalid value expression: "“bar”"/)
+ expect(() => liquid.parse(src)).toThrow(TokenizationError)
})
it('should support assign to a number', async function () {
const src = '{% assign foo=10086 %}{{foo}}'
diff --git a/test/integration/tags/capture.spec.ts b/test/integration/tags/capture.spec.ts
index 580ab9c1aa..0a971d279f 100644
--- a/test/integration/tags/capture.spec.ts
+++ b/test/integration/tags/capture.spec.ts
@@ -26,7 +26,7 @@ describe('tags/capture', function () {
it('should throw on invalid identifier', function () {
const src = '{% capture = %}{%endcapture%}'
return expect(liquid.parseAndRender(src))
- .rejects.toThrow(/= not valid identifier/)
+ .rejects.toThrow('invalid capture name, line:1, col:12')
})
it('should throw when capture not closed', function () {
diff --git a/test/integration/tags/cycle.spec.ts b/test/integration/tags/cycle.spec.ts
index 545312f1d5..3ffb549c82 100644
--- a/test/integration/tags/cycle.spec.ts
+++ b/test/integration/tags/cycle.spec.ts
@@ -11,7 +11,7 @@ describe('tags/cycle', function () {
it('should throw when cycle candidates empty', function () {
return expect(liquid.parseAndRender('{%cycle%}'))
- .rejects.toThrow(/empty candidates/)
+ .rejects.toThrow('empty candidates: "{%cycle%}", line:1, col:8')
})
it('should support cycle in for block', async function () {
diff --git a/test/integration/tags/include.spec.ts b/test/integration/tags/include.spec.ts
index 953c6cde39..a87e93e9a6 100644
--- a/test/integration/tags/include.spec.ts
+++ b/test/integration/tags/include.spec.ts
@@ -49,8 +49,8 @@ describe('tags/include', function () {
'/parent.html': '{%include , %}'
})
return liquid.renderFile('/parent.html').catch(function (e) {
- expect(e.name).toBe('ParseError')
- expect(e.message).toMatch(/illegal argument ","/)
+ expect(e.name).toBe('TokenizationError')
+ expect(e.message).toMatch('illegal file path, file:/parent.html, line:1, col:11')
})
})
@@ -60,7 +60,7 @@ describe('tags/include', function () {
})
return liquid.renderFile('/parent.html').catch(function (e) {
expect(e.name).toBe('RenderError')
- expect(e.message).toMatch(/illegal filename "undefined"/)
+ expect(e.message).toMatch(/illegal file path "undefined"/)
})
})
diff --git a/test/integration/tags/layout.spec.ts b/test/integration/tags/layout.spec.ts
index 4cca339664..7a944c5881 100644
--- a/test/integration/tags/layout.spec.ts
+++ b/test/integration/tags/layout.spec.ts
@@ -23,8 +23,8 @@ describe('tags/layout', function () {
'/parent.html': '{%layout%}'
})
return liquid.renderFile('/parent.html').catch(function (e) {
- expect(e.name).toBe('ParseError')
- expect(e.message).toMatch(/illegal argument ""/)
+ expect(e.name).toBe('TokenizationError')
+ expect(e.message).toMatch(/illegal file path/)
})
})
it('should throw when filename resolved to falsy', function () {
@@ -33,7 +33,7 @@ describe('tags/layout', function () {
})
return liquid.renderFile('/parent.html').catch(function (e) {
expect(e.name).toBe('RenderError')
- expect(e.message).toContain('illegal filename "undefined"')
+ expect(e.message).toContain('illegal file path')
})
})
it('should handle layout none', async function () {
diff --git a/test/integration/tags/render.spec.ts b/test/integration/tags/render.spec.ts
index 9c8aea0772..1ce90c4ab3 100644
--- a/test/integration/tags/render.spec.ts
+++ b/test/integration/tags/render.spec.ts
@@ -42,8 +42,8 @@ describe('tags/render', function () {
'/parent.html': '{%render%}'
})
return liquid.renderFile('/parent.html').catch(function (e) {
- expect(e.name).toBe('ParseError')
- expect(e.message).toMatch(/illegal argument ""/)
+ expect(e.name).toBe('TokenizationError')
+ expect(e.message).toMatch(/illegal file path/)
})
})
@@ -53,7 +53,7 @@ describe('tags/render', function () {
})
return liquid.renderFile('/parent.html').catch(function (e) {
expect(e.name).toBe('RenderError')
- expect(e.message).toMatch(/illegal filename "undefined"/)
+ expect(e.message).toMatch(/illegal file path/)
})
})
diff --git a/test/integration/util/error.spec.ts b/test/integration/util/error.spec.ts
index eb4aa864a8..c30d6ab937 100644
--- a/test/integration/util/error.spec.ts
+++ b/test/integration/util/error.spec.ts
@@ -25,11 +25,12 @@ describe('error', function () {
' 1| 1st',
' 2| 2nd',
'>> 3| X{% . a %} Y',
+ ' ^',
' 4| 4th',
'TokenizationError'
]
await expect(engine.parseAndRender(html.join('\n'))).rejects.toMatchObject({
- message: 'illegal tag syntax, line:3, col:2',
+ message: 'illegal tag syntax, tag name expected, line:3, col:5',
stack: expect.stringContaining(message.join('\n')),
name: 'TokenizationError'
})
@@ -61,7 +62,7 @@ describe('error', function () {
it('should throw error with [line, col] if tag unmatched', async function () {
await expect(engine.parseAndRender('1\n2\nfoo{% assign a = 4 }\n4')).rejects.toMatchObject({
name: 'TokenizationError',
- message: 'tag "{% assign a =..." not closed, line:3, col:4'
+ message: 'tag "{% assign a = 4 }\\n4" not closed, line:3, col:4'
})
})
})
@@ -122,6 +123,7 @@ describe('error', function () {
' 2| 2nd',
' 3| 3rd',
'>> 4| X{%throwingTag%} Y',
+ ' ^',
' 5| 5th',
' 6| 6th',
' 7| 7th',
@@ -150,6 +152,7 @@ describe('error', function () {
' 2| 2nd',
' 3| 3rd',
'>> 4| X{%throwingTag%} Y',
+ ' ^',
' 5| 5th',
' 6| {%block%}{%endblock%}',
' 7| 7th',
@@ -171,6 +174,7 @@ describe('error', function () {
' 2| 2nd',
' 3| 3rd',
'>> 4| X{%throwingTag%} Y',
+ ' ^',
' 5| 5th',
' 6| 6th',
' 7| 7th',
@@ -207,9 +211,15 @@ describe('error', function () {
})
})
it('should throw ParseError when tag not closed', async function () {
- await expect(engine.parseAndRender('{% if %}')).rejects.toMatchObject({
+ await expect(engine.parseAndRender('{% if true %}')).rejects.toMatchObject({
name: 'ParseError',
- message: expect.stringContaining('tag {% if %} not closed')
+ message: expect.stringContaining('tag {% if true %} not closed')
+ })
+ })
+ it('should throw ParseError when tag value not specified', async function () {
+ await expect(engine.parseAndRender('{% if %}{% endif %}')).rejects.toMatchObject({
+ name: 'TokenizationError',
+ message: 'invalid value expression: "", line:1, col:1'
})
})
it('should throw ParseError when tag parse throws', async function () {
@@ -238,6 +248,7 @@ describe('error', function () {
' 2| 2nd',
' 3| 3rd',
'>> 4| X{% a %} {% enda %} Y',
+ ' ^',
' 5| 5th',
' 6| 6th',
' 7| 7th',
@@ -255,6 +266,7 @@ describe('error', function () {
const message = [
' 1| 1st',
'>> 2| X{% a %} {% enda %} Y',
+ ' ^',
' 3| 3rd',
' 4| 4th',
'ParseError: tag "a" not found'
@@ -300,6 +312,7 @@ describe('error', function () {
' 2| 2nd',
' 3| 3rd',
'>> 4| X{%throwingTag%} Y',
+ ' ^',
' 5| 5th',
' 6| 6th',
' 7| 7th',