1
1
import * as debug from 'debug'
2
2
3
- import { ISimpleToken , IRangeToken } from './IToken' ;
3
+ import { ISimpleToken , IRangeToken , sortTokenFAscTDesc , IToken , isRangeToken , isSimpleToken } from './IToken' ;
4
4
import { IModule , IModuleEnums } from './module'
5
5
import { IMatcher , IMatcherState , createClassMatcher } from './matchers'
6
6
import { ITokenEmitter } from './tokenProducers'
7
- import { isComment , isContinue , binarySearch , last } from './helpers'
8
- import { ws } from './classes'
7
+ import { isContinue , binarySearch , last , propsExist } from './helpers'
8
+ import { ws , TestFunc } from './classes'
9
9
import { createTokenEmitter , rangeProducer } from './tokenProducers'
10
+ import { Stream } from 'stream' ;
10
11
11
12
const printer = debug ( 'IChannel' )
12
13
@@ -21,8 +22,8 @@ export interface Snippet {
21
22
}
22
23
23
24
/**
 * Result of one processor step (see createProcessor): the leading,
 * still-unmatched part of the input snippet plus the token covering the
 * matched range (absolute offsets into the module's raw text).
 */
export interface Processed {
    snippet: Snippet;
    token: IRangeToken;
}
27
28
28
29
function compare ( a : any , b : any ) : 0 | - 1 | 1 {
@@ -42,6 +43,37 @@ const wsMatcher = createClassMatcher(ws, '>1')
42
43
const wsEmitter = createTokenEmitter ( rangeProducer , wsMatcher )
43
44
44
45
46
+ const regexp = ( s : RegExp ) => line => line . match ( s )
47
+ const isComment = line => {
48
+
49
+ if ( '*Cc' . includes ( line [ 0 ] ) ) {
50
+ const found : RegExpMatchArray = [ line ]
51
+ found . index = 0
52
+ found . input = line
53
+ return found
54
+ }
55
+ return null
56
+ }
57
+
58
+ const isNotComment = line => {
59
+ if ( ! isComment ( line ) ) {
60
+ const found : RegExpMatchArray = [ line ]
61
+ found . index = 0
62
+ found . input = line
63
+ return found
64
+ }
65
+ return null
66
+ }
67
+
68
// Pipeline builder specialized for Processed results: feeds the remaining
// (unmatched) snippet of each stage into the next stage.
const chain = compose((a: Processed) => a.snippet)

// Line continuation: a newline followed by exactly 5 whitespace characters
// and then one non-whitespace character.
export const processLineContinuation = createProcessor(regexp(/\n\s{5}[^\s]/))
export const processNonComments = createProcessor(isNotComment)
// NOTE(review): \t is already included in \s, so [\s\t]+ is equivalent to \s+.
export const processWS = createProcessor(regexp(/[\s\t]+/))
export const processComments = createProcessor(isComment)
74
+
75
+
76
+
45
77
export interface IChannel < T extends ISimpleToken > {
46
78
mod : IModule ;
47
79
name : string ;
@@ -97,7 +129,7 @@ export function createLogicalEOLChannel<T extends ISimpleToken>(ch: IChannel<T>)
97
129
tokens,
98
130
name : 'vlf' ,
99
131
process ( ) {
100
- tokens = [ ]
132
+ tokens . splice ( 0 )
101
133
const lftok = ch . tokens . slice ( 0 )
102
134
const raw = ch . mod . raw
103
135
let prev = 0
@@ -134,142 +166,178 @@ export function createCommentsChannel(ch: IChannel<ISimpleToken>): IChannel<IRan
134
166
const _lf = vlf || lf
135
167
const tokens : IRangeToken [ ] = [ ]
136
168
const raw = _lf . mod . raw
169
+
170
+ const pipeLine = chain ( processComments )
171
+
137
172
const comm : IChannel < IRangeToken > = {
138
173
mod : ch . mod ,
139
174
tokens,
140
175
name : 'comments' ,
141
176
process ( ) {
142
177
tokens . splice ( 0 )
143
178
const lftok = _lf . tokens . slice ( 0 ) //copy
144
- let prev = 0
145
- for ( let i = 0 ; i < lftok . length ; i ++ ) {
146
- const pos = lftok [ i ] . f
147
- const line = raw . slice ( prev , pos )
148
- if ( isComment ( line ) ) {
149
- tokens . push ( { f : prev , t : pos - 1 } )
150
- }
151
- prev = pos + 1
152
- }
153
- const lastf = last ( lftok ) . f
154
- if ( lastf < raw . length - 1 ) {
155
- const line = raw . slice ( lastf + 1 )
156
- if ( isComment ( line ) ) {
157
- tokens . push ( { f : lastf + 1 , t : raw . length - 1 } )
158
- }
179
+ for ( const processed of pipeLine ( createSnippetsUsingTokens ( raw , lftok ) ) ) {
180
+ tokens . push ( processed . token )
159
181
}
160
182
}
161
183
}
162
184
ch . mod . channels . set ( comm . name , comm )
163
185
return comm
164
186
}
165
187
166
- export function createSourceChannel ( ch : IChannel < ISimpleToken > ) : IChannel < IRangeToken > {
188
+ export function createChannelExcluding ( name : string , ... ch : IChannel < IToken > [ ] ) : IChannel < IRangeToken > {
167
189
168
- const vlf = ch . mod . channels . get ( 'vlf' )
169
- const comms = ch . mod . channels . get ( 'comments' ) as IChannel < IRangeToken >
170
- if ( vlf !== ch ) {
171
- throw new TypeError ( `source "vlf" channel is not registered with a module` )
190
+ if ( ch . length === 0 ) {
191
+ throw new Error ( `Illegal Arguments, no arguments given` )
172
192
}
173
- if ( comms === undefined ) {
174
- throw new TypeError ( `source "comments" channel is not registered with a module` )
193
+ const foundErrMod = ch . find ( fch => fch . mod !== ch [ 0 ] . mod )
194
+ if ( foundErrMod ) {
195
+ throw new Error ( `Channels dont come from the same module` )
175
196
}
197
+ // merge and sort all the tokens from the channels
176
198
const tokens : IRangeToken [ ] = [ ]
177
-
178
- const source : IChannel < IRangeToken > = {
179
- mod : ch . mod ,
180
- tokens, //vtokens
181
- name : 'source' ,
199
+ const raw = ch [ 0 ] . mod . raw
200
+ const rc : IChannel < IRangeToken > = {
201
+ mod : ch [ 0 ] . mod ,
202
+ tokens,
203
+ name,
182
204
process ( ) {
183
- tokens . splice ( 0 ) // delete in palce
184
- const lftok = vlf . tokens . slice ( 0 ) //copy
185
- const raw = vlf . mod . raw
205
+ const excludeTokens = ch . map ( c => c . tokens ) . reduce ( ( col , arr ) => {
206
+ col . push ( ...arr )
207
+ return col
208
+ } , [ ] )
209
+ excludeTokens . sort ( sortTokenFAscTDesc )
210
+ tokens . splice ( 0 )
186
211
let prev = 0
187
- const lastf = last ( lftok ) . f
188
- for ( let i = 0 ; i < lftok . length ; i ++ ) {
189
- const pos = lftok [ i ] . f
190
- const line = raw . slice ( prev , pos )
191
- if ( ! isComment ( line ) ) {
192
- tokens . push ( { f : prev , t : pos - 1 } )
193
- }
194
- prev = pos + 1
212
+ if ( excludeTokens . length === 0 ) {
213
+ tokens . push ( { f : 0 , t : raw . length - 1 } )
214
+ return
195
215
}
196
- if ( lastf < raw . length - 1 ) {
197
- const line = raw . slice ( lastf + 1 )
198
- if ( ! isComment ( line ) ) {
199
- tokens . push ( { f : lastf + 1 , t : raw . length - 1 } )
216
+ for ( const token of excludeTokens ) {
217
+ if ( token . f <= prev ) { // we skipped ahead temp
218
+ prev = Math . max ( token . f + 1 , prev )
219
+ if ( ( < IRangeToken > token ) . t ) {
220
+ prev = Math . max ( prev , ( < IRangeToken > token ) . t + 1 )
221
+ }
222
+ continue
200
223
}
224
+ tokens . push ( { f : prev , t : token . f - 1 } )
225
+ prev = isRangeToken ( token ) ?
226
+ ( < IRangeToken > token ) . t + 1 :
227
+ token . f + 1
228
+ }
229
+ const lastToken = last ( excludeTokens )
230
+ if ( ( < IRangeToken > lastToken ) . t &&
231
+ ( < IRangeToken > lastToken ) . t < raw . length - 1 ) {
232
+ tokens . push ( { f : ( < IRangeToken > lastToken ) . t + 1 , t : raw . length - 1 } )
233
+ }
234
+ else if ( lastToken . f < raw . length - 1 ) {
235
+ tokens . push ( { f : lastToken . f + 1 , t : raw . length - 1 } )
201
236
}
202
237
}
203
238
}
204
- ch . mod . channels . set ( source . name , source )
205
- return source
239
+ ch [ 0 ] . mod . channels . set ( name , rc )
240
+ return rc
206
241
}
207
242
208
243
export function createWSChannel ( ch : IChannel < IRangeToken > ) : IChannel < IRangeToken > {
209
244
210
- const vlf = ch . mod . channels . get ( 'vlf' ) as IChannel < ISimpleToken >
211
245
const source = ch . mod . channels . get ( 'source' ) as IChannel < IRangeToken >
212
- if ( vlf !== ch ) {
213
- throw new TypeError ( `source "vlf" channel is not registered with a module` )
214
- }
215
246
if ( source === undefined ) {
216
247
throw new TypeError ( `source "comments" channel is not registered with a module` )
217
248
}
218
- const raw = vlf . mod . raw
249
+ const raw = ch . mod . raw
250
+ const pipeLine = chain ( processLineContinuation , processWS )
219
251
const tokens : IRangeToken [ ] = [ ]
220
- const nonWSSource : Snippet [ ] = [ ]
221
252
const ws : IChannel < IRangeToken > = {
222
253
mod : ch . mod ,
223
- tokens : [ ] , //vtokens
254
+ tokens, //vtokens
224
255
name : 'ws' ,
225
256
process ( ) {
226
257
tokens . splice ( 0 )
227
- nonWSSource . splice ( 0 )
228
- const srctok = source . tokens . slice ( 0 ) //copy
229
- for ( let i = 0 ; i < srctok . length ; i ++ ) {
230
- const { f, t } = srctok [ i ]
231
- let snip = { line : raw . slice ( f , t + 1 ) , f, t }
232
- // split out continueation lines
233
- const { snippets, tokens : _tokens } = processLineContinuation ( snip )
234
- tokens . splice ( 0 , 0 , ..._tokens )
235
- snippets . map ( processWS ) . forEach ( ( { snippets : snips , tokens : toks } ) => {
236
- tokens . splice ( 0 , 0 , ...toks )
237
- nonWSSource . splice ( 0 , 0 , ...snips )
238
- } )
239
- // here the ws token need to be extracted from line
258
+ const tok = source . tokens . slice ( 0 ) //copy
259
+ for ( const processed of pipeLine ( createSnippetsUsingTokens ( raw , tok ) ) ) {
260
+ tokens . push ( processed . token )
240
261
}
241
- // sort ws tokens because there will be continue line stuff here!!
262
+ tokens . sort ( ( t1 , t2 ) => t1 . f - t2 . f )
242
263
}
243
264
}
244
- ch . mod . channels . set ( source . name , source )
245
- return source
265
+ ch . mod . channels . set ( ws . name , ws )
266
+ return ws
246
267
}
247
268
248
- export function createProcessor ( regex : RegExp ) {
269
+ export function createProcessor ( matcher : TestFunc ) {
249
270
250
- return function process ( s : Snippet ) : Processed {
271
+ return function * processor ( s : Snippet ) : IterableIterator < Processed > {
251
272
const { line, f, t } = s
252
- const found = line . match ( regex ) ;
253
- const rc = {
254
- snippets : [ s ] ,
255
- tokens : [ ]
256
- }
257
-
273
+ const found = matcher ( line )
258
274
if ( found ) {
259
275
const first = line . slice ( 0 , found . index )
260
276
const second = line . slice ( found . index + found [ 0 ] . length )
261
- rc . snippets [ 0 ] = { line : first , f, t : f + first . length - 1 }
262
- rc . tokens [ 0 ] = { f : f + found . index , t : f + found . index + found [ 0 ] . length - 1 }
277
+ yield {
278
+ snippet : { line : first , f, t : f + first . length - 1 } ,
279
+ token : { f : f + found . index , t : f + found . index + found [ 0 ] . length - 1 }
280
+ }
263
281
if ( second ) {
264
- const rv = process ( { line : second , f : f + found . index + found [ 0 ] . length , t } )
265
- rc . tokens . splice ( 0 , 0 , ...rv . tokens )
266
- rc . snippets . splice ( 0 , 0 , ...rv . snippets )
282
+ yield * processor ( { line : second , f : f + found . index + found [ 0 ] . length , t } )
267
283
}
268
284
}
269
- return rc
270
285
}
271
286
}
272
287
273
- export const processLineContinuation = createProcessor ( / \n \s { 5 } [ ^ \s ] / )
274
- export const processWS = createProcessor ( / [ \s \t ] + / )
288
+ function * createSnippetsUsingTokens ( raw : string , tokens : ( ISimpleToken | IRangeToken ) [ ] ) : IterableIterator < Snippet > {
289
+ if ( ! ( raw || '' ) . trim ( ) ) {
290
+ return
291
+ }
292
+ let prev = 0
293
+
294
+ for ( const token of tokens ) {
295
+ if ( isRangeToken ( token ) ) { // range token
296
+ const { f, t } = < IRangeToken > token
297
+ yield { line : raw . slice ( f , t + 1 ) , f, t }
298
+ prev = t + 1
299
+ }
300
+ else if ( isSimpleToken ( token ) ) { //simpletoken
301
+ const { f } = < ISimpleToken > token
302
+ yield { line : raw . slice ( prev , f ) , f : prev , t : f - 1 }
303
+ prev = f + 1
304
+
305
+ }
306
+ else {
307
+ throw new Error ( `token is not a SimpleToken or a RangeToken, i.e: [${ JSON . stringify ( token ) } ]` )
308
+ }
309
+ }
310
+ const lastToken = last ( tokens )
311
+ if (
312
+ isSimpleToken ( lastToken ) //slicer token
313
+ || lastToken === undefined //source code has only one-liner?
314
+ ) {
315
+ const f = lastToken && lastToken . f || 0
316
+ if ( raw . length - 1 > f ) {
317
+ yield { line : raw . slice ( f + 1 , raw . length ) , f : f + 1 , t : raw . length - 1 }
318
+ }
319
+ }
320
+ }
321
+
322
+
323
+ export function compose < T , K > ( convert : ( a : K ) => T ) {
324
+
325
+ return function chain ( ...transformers : ( ( s : T ) => IterableIterator < K > ) [ ] ) {
275
326
327
+ function * stream ( data : T , ...fns : ( ( s : T ) => IterableIterator < K > ) [ ] ) {
328
+ const [ fn , ...others ] = fns
329
+ for ( const elt of fn ( data ) ) {
330
+ yield elt
331
+ if ( others . length ) {
332
+ yield * stream ( convert ( elt ) , ...others )
333
+ }
334
+ }
335
+ }
336
+
337
+ return function * activate ( gen : IterableIterator < T > ) : IterableIterator < K > {
338
+ for ( const elt of gen ) {
339
+ yield * stream ( elt , ...transformers )
340
+ }
341
+ }
342
+ }
343
+ }
0 commit comments