@@ -17,7 +17,8 @@ export const RESOLVE_METHOD = 'resolve';
17
17
* semantics.
18
18
*/
19
19
export class Token {
20
- private tokenKey ?: string ;
20
+ private tokenStringification ?: string ;
21
+ private tokenListification ?: string [ ] ;
21
22
22
23
/**
23
24
* Creates a token that resolves to `value`.
@@ -72,10 +73,10 @@ export class Token {
72
73
return this . valueOrFunction . toString ( ) ;
73
74
}
74
75
75
- if ( this . tokenKey === undefined ) {
76
- this . tokenKey = TOKEN_STRING_MAP . register ( this , this . displayName ) ;
76
+ if ( this . tokenStringification === undefined ) {
77
+ this . tokenStringification = TOKEN_MAP . registerString ( this , this . displayName ) ;
77
78
}
78
- return this . tokenKey ;
79
+ return this . tokenStringification ;
79
80
}
80
81
81
82
/**
@@ -89,6 +90,30 @@ export class Token {
89
90
throw new Error ( 'JSON.stringify() cannot be applied to structure with a Token in it. Use a document-specific stringification method instead.' ) ;
90
91
}
91
92
93
+ /**
94
+ * Return a string list representation of this token
95
+ *
96
+ * Call this if the Token intrinsically evaluates to a list of strings.
97
+ * If so, you can represent the Token in a similar way in the type
98
+ * system.
99
+ *
100
+ * Note that even though the Token is represented as a list of strings, you
101
+ * still cannot do any operations on it such as concatenation, indexing,
102
+ * or taking its length. The only useful operations you can do to these lists
103
+ * is constructing a `FnJoin` or a `FnSelect` on it.
104
+ */
105
+ public toList ( ) : string [ ] {
106
+ const valueType = typeof this . valueOrFunction ;
107
+ if ( valueType === 'string' || valueType === 'number' || valueType === 'boolean' ) {
108
+ throw new Error ( 'Got a literal Token value; cannot be encoded as a list.' ) ;
109
+ }
110
+
111
+ if ( this . tokenListification === undefined ) {
112
+ this . tokenListification = TOKEN_MAP . registerList ( this , this . displayName ) ;
113
+ }
114
+ return this . tokenListification ;
115
+ }
116
+
92
117
/**
93
118
* Return a concatenated version of this Token in a string context
94
119
*
@@ -103,12 +128,15 @@ export class Token {
103
128
104
129
/**
105
130
* Returns true if obj is a token (i.e. has the resolve() method or is a string
106
- * that includes token markers).
131
+ * that includes token markers), or it is a listification of a Token string.
132
+ *
107
133
* @param obj The object to test.
108
134
*/
109
135
export function unresolved ( obj : any ) : boolean {
110
136
if ( typeof ( obj ) === 'string' ) {
111
- return TOKEN_STRING_MAP . createTokenString ( obj ) . test ( ) ;
137
+ return TOKEN_MAP . createStringTokenString ( obj ) . test ( ) ;
138
+ } else if ( Array . isArray ( obj ) && obj . length === 1 ) {
139
+ return isListToken ( obj [ 0 ] ) ;
112
140
} else {
113
141
return typeof ( obj [ RESOLVE_METHOD ] ) === 'function' ;
114
142
}
@@ -158,7 +186,7 @@ export function resolve(obj: any, prefix?: string[]): any {
158
186
// string - potentially replace all stringified Tokens
159
187
//
160
188
if ( typeof ( obj ) === 'string' ) {
161
- return TOKEN_STRING_MAP . resolveMarkers ( obj as string ) ;
189
+ return TOKEN_MAP . resolveStringTokens ( obj as string ) ;
162
190
}
163
191
164
192
//
@@ -169,27 +197,31 @@ export function resolve(obj: any, prefix?: string[]): any {
169
197
return obj ;
170
198
}
171
199
172
- //
173
- // tokens - invoke 'resolve' and continue to resolve recursively
174
- //
175
-
176
- if ( unresolved ( obj ) ) {
177
- const value = obj [ RESOLVE_METHOD ] ( ) ;
178
- return resolve ( value , path ) ;
179
- }
180
-
181
200
//
182
201
// arrays - resolve all values, remove undefined and remove empty arrays
183
202
//
184
203
185
204
if ( Array . isArray ( obj ) ) {
205
+ if ( containsListToken ( obj ) ) {
206
+ return TOKEN_MAP . resolveListTokens ( obj ) ;
207
+ }
208
+
186
209
const arr = obj
187
210
. map ( ( x , i ) => resolve ( x , path . concat ( i . toString ( ) ) ) )
188
211
. filter ( x => typeof ( x ) !== 'undefined' ) ;
189
212
190
213
return arr ;
191
214
}
192
215
216
+ //
217
+ // tokens - invoke 'resolve' and continue to resolve recursively
218
+ //
219
+
220
+ if ( unresolved ( obj ) ) {
221
+ const value = obj [ RESOLVE_METHOD ] ( ) ;
222
+ return resolve ( value , path ) ;
223
+ }
224
+
193
225
//
194
226
// objects - deep-resolve all values
195
227
//
@@ -221,6 +253,14 @@ export function resolve(obj: any, prefix?: string[]): any {
221
253
return result ;
222
254
}
223
255
256
+ function isListToken ( x : any ) {
257
+ return typeof ( x ) === 'string' && TOKEN_MAP . createListTokenString ( x ) . test ( ) ;
258
+ }
259
+
260
+ function containsListToken ( xs : any [ ] ) {
261
+ return xs . some ( isListToken ) ;
262
+ }
263
+
224
264
/**
225
265
* Central place where we keep a mapping from Tokens to their String representation
226
266
*
@@ -230,7 +270,7 @@ export function resolve(obj: any, prefix?: string[]): any {
230
270
* All instances of TokenStringMap share the same storage, so that this process
231
271
* works even when different copies of the library are loaded.
232
272
*/
233
- class TokenStringMap {
273
+ class TokenMap {
234
274
private readonly tokenMap : { [ key : string ] : Token } ;
235
275
236
276
constructor ( ) {
@@ -239,7 +279,7 @@ class TokenStringMap {
239
279
}
240
280
241
281
/**
242
- * Generating a unique string for this Token, returning a key
282
+ * Generate a unique string for this Token, returning a key
243
283
*
244
284
* Every call for the same Token will produce a new unique string, no
245
285
* attempt is made to deduplicate. Token objects should cache the
@@ -249,35 +289,56 @@ class TokenStringMap {
249
289
* hint. This may be used to produce aesthetically pleasing and
250
290
* recognizable token representations for humans.
251
291
*/
252
- public register ( token : Token , representationHint ?: string ) : string {
253
- const counter = Object . keys ( this . tokenMap ) . length ;
254
- const representation = representationHint || `TOKEN` ;
292
+ public registerString ( token : Token , representationHint ?: string ) : string {
293
+ const key = this . register ( token , representationHint ) ;
294
+ return `${ BEGIN_STRING_TOKEN_MARKER } ${ key } ${ END_TOKEN_MARKER } ` ;
295
+ }
255
296
256
- const key = `${ representation } .${ counter } ` ;
257
- if ( new RegExp ( `[^${ VALID_KEY_CHARS } ]` ) . exec ( key ) ) {
258
- throw new Error ( `Invalid characters in token representation: ${ key } ` ) ;
259
- }
297
+ /**
298
+ * Generate a unique string for this Token, returning a key
299
+ */
300
+ public registerList ( token : Token , representationHint ?: string ) : string [ ] {
301
+ const key = this . register ( token , representationHint ) ;
302
+ return [ `${ BEGIN_LIST_TOKEN_MARKER } ${ key } ${ END_TOKEN_MARKER } ` ] ;
303
+ }
260
304
261
- this . tokenMap [ key ] = token ;
262
- return `${ BEGIN_TOKEN_MARKER } ${ key } ${ END_TOKEN_MARKER } ` ;
305
+ /**
306
+ * Returns a `TokenString` for this string.
307
+ */
308
+ public createStringTokenString ( s : string ) {
309
+ return new TokenString ( s , BEGIN_STRING_TOKEN_MARKER , `[${ VALID_KEY_CHARS } ]+` , END_TOKEN_MARKER ) ;
263
310
}
264
311
265
312
/**
266
313
* Returns a `TokenString` for this string.
267
314
*/
268
- public createTokenString ( s : string ) {
269
- return new TokenString ( s , BEGIN_TOKEN_MARKER , `[${ VALID_KEY_CHARS } ]+` , END_TOKEN_MARKER ) ;
315
+ public createListTokenString ( s : string ) {
316
+ return new TokenString ( s , BEGIN_LIST_TOKEN_MARKER , `[${ VALID_KEY_CHARS } ]+` , END_TOKEN_MARKER ) ;
270
317
}
271
318
272
319
/**
273
320
* Replace any Token markers in this string with their resolved values
274
321
*/
275
- public resolveMarkers ( s : string ) : any {
276
- const str = this . createTokenString ( s ) ;
322
+ public resolveStringTokens ( s : string ) : any {
323
+ const str = this . createStringTokenString ( s ) ;
277
324
const fragments = str . split ( this . lookupToken . bind ( this ) ) ;
278
325
return fragments . join ( ) ;
279
326
}
280
327
328
+ public resolveListTokens ( xs : string [ ] ) : any {
329
+ // Must be a singleton list token, because concatenation is not allowed.
330
+ if ( xs . length !== 1 ) {
331
+ throw new Error ( `Cannot add elements to list token, got: ${ xs } ` ) ;
332
+ }
333
+
334
+ const str = this . createListTokenString ( xs [ 0 ] ) ;
335
+ const fragments = str . split ( this . lookupToken . bind ( this ) ) ;
336
+ if ( fragments . length !== 1 ) {
337
+ throw new Error ( `Cannot concatenate strings in a tokenized string array, got: ${ xs [ 0 ] } ` ) ;
338
+ }
339
+ return fragments . values ( ) [ 0 ] ;
340
+ }
341
+
281
342
/**
282
343
* Find a Token by key
283
344
*/
@@ -288,16 +349,30 @@ class TokenStringMap {
288
349
289
350
return this . tokenMap [ key ] ;
290
351
}
352
+
353
+ private register ( token : Token , representationHint ?: string ) : string {
354
+ const counter = Object . keys ( this . tokenMap ) . length ;
355
+ const representation = representationHint || `TOKEN` ;
356
+
357
+ const key = `${ representation } .${ counter } ` ;
358
+ if ( new RegExp ( `[^${ VALID_KEY_CHARS } ]` ) . exec ( key ) ) {
359
+ throw new Error ( `Invalid characters in token representation: ${ key } ` ) ;
360
+ }
361
+
362
+ this . tokenMap [ key ] = token ;
363
+ return key ;
364
+ }
291
365
}
292
366
293
- const BEGIN_TOKEN_MARKER = '${Token[' ;
367
+ const BEGIN_STRING_TOKEN_MARKER = '${Token[' ;
368
+ const BEGIN_LIST_TOKEN_MARKER = '#{Token[' ;
294
369
const END_TOKEN_MARKER = ']}' ;
295
370
const VALID_KEY_CHARS = 'a-zA-Z0-9:._-' ;
296
371
297
372
/**
298
373
* Singleton instance of the token string map
299
374
*/
300
- const TOKEN_STRING_MAP = new TokenStringMap ( ) ;
375
+ const TOKEN_MAP = new TokenMap ( ) ;
301
376
302
377
/**
303
378
* Interface that Token joiners implement
@@ -382,6 +457,10 @@ type Fragment = StringFragment | TokenFragment;
382
457
class TokenStringFragments {
383
458
private readonly fragments = new Array < Fragment > ( ) ;
384
459
460
/** Number of fragments (literal strings and Tokens) accumulated so far. */
public get length() {
  return this.fragments.length;
}
463
+
385
464
public values(): any[] {
  // Resolve each fragment in order: Tokens are resolved recursively,
  // literal string fragments pass through unchanged.
  const result = new Array<any>();
  for (const fragment of this.fragments) {
    result.push(fragment.type === 'token' ? resolve(fragment.token) : fragment.str);
  }
  return result;
}
0 commit comments