1
1
import * as debug from 'debug'
2
2
3
- import { ISimpleToken , IRangeToken , sortTokenFAscTDesc , IToken , isRangeToken , isSimpleToken } from './IToken' ;
4
- import { IModule , IModuleEnums } from './module'
5
- import { IMatcher , IMatcherState , createClassMatcher } from './matchers'
6
- import { ITokenEmitter } from './tokenProducers'
7
- import { isContinue , binarySearch , last , propsExist } from './helpers'
8
- import { ws , TestFunc } from './classes'
9
- import { createTokenEmitter , rangeProducer } from './tokenProducers'
10
- import { Stream } from 'stream' ;
3
+ import {
4
+ ISimpleToken ,
5
+ IRangeToken ,
6
+ sortTokenFAscTDesc ,
7
+ IToken ,
8
+ isRangeToken
9
+ } from './IToken' ;
10
+
11
+
12
+ import { last } from './helpers'
13
+ import {
14
+ chain ,
15
+ Processed ,
16
+ createSnippetsUsingTokens
17
+ } from './processors'
11
18
12
19
const printer = debug ( 'IChannel' )
13
20
14
- export interface linePos {
15
- ln : number ;
16
- col : number ;
21
/**
 * A named producer of tokens over some raw input.
 * `process()` (re)scans the input and refills `tokens`.
 */
export interface IChannel<T extends ISimpleToken> {
    name: string;   // channel identifier, e.g. 'vlf', 'comments'
    tokens: T[];    // tokens produced by the most recent process() run
    process();      // NOTE(review): no declared return type — implicitly any; presumably void
}
18
- export interface Snippet {
19
- line : string ;
20
- f : number ;
21
- t : number ;
26
/**
 * Input source for a channel: the raw text plus the slicer tokens
 * used to cut that text into snippets.
 */
export interface ISpring<T> {
    slicers: T[],   // tokens delimiting how `data` is sliced
    data: string    // raw text to be sliced
}
23
30
24
- export interface Processed {
25
- snippet : Snippet ;
26
- token : IRangeToken ;
31
+ export function tokenAsSimple ( res : Processed ) : ISimpleToken {
32
+ return { f : res . token . f }
27
33
}
28
34
29
- function compare ( a : any , b : any ) : 0 | - 1 | 1 {
30
- const df = a . f - b . f
31
- if ( df > 0 ) return 1
32
- if ( df < 0 ) return - 1
33
- if ( a . t !== undefined && b . t !== undefined ) {
34
- const dt = a . t - b . t
35
- if ( dt > 0 ) return 1
36
- if ( dt < 0 ) return - 1
37
- }
38
- return 0
39
- }
40
-
41
// binary search specialised with the token comparator above
const search = binarySearch(compare)
// matcher for the whitespace character class; NOTE(review): '>1' semantics
// come from createClassMatcher — presumably "more than one occurrence"; confirm in matchers.ts
const wsMatcher = createClassMatcher(ws, '>1')
// emits range tokens for whitespace runs found by wsMatcher
const wsEmitter = createTokenEmitter(rangeProducer, wsMatcher)
44
-
45
-
46
- const regexp = ( s : RegExp ) => line => line . match ( s )
47
- const isComment = line => {
48
-
49
- if ( '*Cc' . includes ( line [ 0 ] ) ) {
50
- const found : RegExpMatchArray = [ line ]
51
- found . index = 0
52
- found . input = line
53
- return found
54
- }
55
- return null
35
+ export function tokenAsRange ( res : Processed ) : IRangeToken {
36
+ return { f : res . token . f , t : res . token . t }
56
37
}
57
38
58
- const isNotComment = line => {
59
- if ( ! isComment ( line ) ) {
60
- const found : RegExpMatchArray = [ line ]
61
- found . index = 0
62
- found . input = line
63
- return found
64
- }
65
- return null
66
- }
67
-
68
// Pipeline composer: each stage's Processed result is re-fed to the next
// stage via its `snippet` part.
const chain = compose((a: Processed) => a.snippet)

// newline followed by exactly 5 spaces then a non-space — a continuation line
export const processLineContinuation = createProcessor(regexp(/\n\s{5}[^\s]/))
export const processNonComments = createProcessor(isNotComment)
// runs of whitespace; NOTE(review): \t inside [\s\t] is redundant (\s already covers tab)
export const processWS = createProcessor(regexp(/[\s\t]+/))
export const processComments = createProcessor(isComment)
74
-
75
-
76
-
77
- export interface IChannel < T extends ISimpleToken > {
78
- mod : IModule ;
79
- name : string ;
80
- tokens : T [ ] ;
81
- process ( ) ;
82
- }
83
-
84
- export function createChannel < T extends ISimpleToken > ( name : string ) {
85
- return function addTokenMatchers ( ...te : ( ITokenEmitter < T > ) [ ] ) {
86
- if ( te . length === 0 ) {
87
- throw new Error ( `No token emitter functions specified` )
88
- }
89
- return function setModule ( module : IModule ) : IChannel < T > {
90
- //module,
91
- const channel = {
92
- mod : module ,
93
- tokens : [ ] ,
94
- name,
95
- process ( ) {
96
- if ( ! module . state . has ( IModuleEnums . loaded ) ) {
97
- throw new Error ( `Module [${ module . name } ] not loaded` )
98
- }
99
- const raw = module . raw
100
- if ( module . raw . length === 0 ) {
101
- printer ( `module [${ module . name } ] loaded an empty file` )
102
- return
103
- }
104
- this . tokens = [ ] //clear
105
- for ( let i = 0 ; i < raw . length ; i ++ ) {
106
- te . forEach ( fn => {
107
- const token = fn ( raw [ i ] , i )
108
- if ( token ) {
109
- this . tokens . push ( token )
110
- }
111
- } )
39
/**
 * Curried factory for a token channel.
 *
 * createChannel(name, convert)   — channel name and a Processed→token projection
 *   (pipe)                       — processor pipeline applied to each snippet
 *   (spring?)                    — optional data source; when omitted the raw
 *                                  text is treated as one slice
 *   (raw)                        — the raw text; returns the IChannel
 *
 * NOTE(review): in the `pipe` signature below, `Snippet` is a parameter NAME,
 * not a type annotation — the argument is implicitly `any`. It is actually
 * invoked with the iterator returned by createSnippetsUsingTokens; consider
 * renaming/typing it.
 */
export function createChannel<T extends ISimpleToken, K extends ISimpleToken>(name: string, convert: (res: Processed) => T) {
    return function slicer(pipe: (Snippet) => IterableIterator<Processed>) {
        return function (spring?: () => ISpring<K>) {
            return function setModule(raw: string): IChannel<T> {

                const tokens: T[] = []
                const channel = {
                    tokens,
                    name,
                    process() {
                        tokens.splice(0) // clear results of any previous run
                        if (raw.length === 0) {
                            printer(`channel [${name}] processed an empty file`)
                            return
                        }
                        // no spring: whole raw text as one slice, terminated at raw.length
                        const { data, slicers } = spring ? spring() : { data: raw, slicers: [{ f: raw.length }] }
                        for (const processed of pipe(createSnippetsUsingTokens(data, slicers))) {
                            //console.log(processed)
                            tokens.push(convert(processed))
                        }
                        // canonical order: f ascending, t descending (see IToken)
                        tokens.sort(sortTokenFAscTDesc)
                    }
                }
                return channel
            }
        }
    }
}
120
67
121
- export function createLogicalEOLChannel < T extends ISimpleToken > ( ch : IChannel < T > ) : IChannel < T > {
122
68
123
- if ( ch !== ch . mod . channels . get ( 'lf' ) ) {
124
- throw new TypeError ( `source "lf" channel is not registered with a module` )
125
- }
126
- let tokens : T [ ] = [ ]
127
- const vCh : IChannel < T > = {
128
- mod : ch . mod ,
69
+ export function createLogicalEOLChannel ( ch : IChannel < ISimpleToken > , raw :string ) : IChannel < ISimpleToken > {
70
+
71
+ let tokens : ISimpleToken [ ] = [ ]
72
+ const vCh : IChannel < ISimpleToken > = {
129
73
tokens,
130
74
name : 'vlf' ,
131
75
process ( ) {
132
76
tokens . splice ( 0 )
133
77
const lftok = ch . tokens . slice ( 0 )
134
- const raw = ch . mod . raw
135
78
let prev = 0
136
79
for ( let i = 0 ; i < lftok . length ; i ++ ) {
137
80
const pos = ch . tokens [ i ] . f
138
81
const line = raw . slice ( prev , pos )
139
82
prev = pos + 1
140
- if ( isContinue ( line ) ) {
83
+ if ( line . match ( / ^ \s { 5 } [ ^ \s ] / ) ) {
141
84
if ( i === 0 ) {
142
85
const err = `first line cannot be continuation: [${ line } ]`
143
86
printer ( err )
@@ -147,58 +90,21 @@ export function createLogicalEOLChannel<T extends ISimpleToken>(ch: IChannel<T>)
147
90
tokens [ tokens . length - 1 ] = lftok [ i ]
148
91
continue
149
92
}
150
- tokens . push ( lftok [ i ] )
93
+ tokens . push ( { f : lftok [ i ] . f } )
151
94
}
152
95
}
153
96
}
154
- ch . mod . channels . set ( vCh . name , vCh )
155
97
return vCh
156
98
}
157
99
158
/**
 * Derives a 'comments' channel from a module's 'lf' or 'vlf' (line-feed)
 * channel: the raw text is sliced on the line tokens and each snippet is
 * run through the comment processor; matched comment regions become range
 * tokens. Registers the resulting channel on the module as 'comments'.
 *
 * @throws TypeError when `ch` is neither the module's 'lf' nor 'vlf' channel.
 */
export function createCommentsChannel(ch: IChannel<ISimpleToken>): IChannel<IRangeToken> {

    const vlf = ch.mod.channels.get('vlf')
    const lf = ch.mod.channels.get('lf')

    if (ch !== vlf && ch !== lf) {
        throw new TypeError(`source "lf/vlf" channel is not registered with a module`)
    }
    // prefer the logical-EOL channel when the module has one
    const _lf = vlf || lf
    const tokens: IRangeToken[] = []
    const raw = _lf.mod.raw

    // single-stage pipeline: only the comment processor
    const pipeLine = chain(processComments)

    const comm: IChannel<IRangeToken> = {
        mod: ch.mod,
        tokens,
        name: 'comments',
        process() {
            tokens.splice(0) // reset previous results
            const lftok = _lf.tokens.slice(0) //copy
            for (const processed of pipeLine(createSnippetsUsingTokens(raw, lftok))) {
                tokens.push(processed.token)
            }
        }
    }
    ch.mod.channels.set(comm.name, comm)
    return comm
}
187
-
188
- export function createChannelExcluding ( name : string , ...ch : IChannel < IToken > [ ] ) : IChannel < IRangeToken > {
100
+ export function createChannelExcluding ( name : string , raw : string , ...ch : IChannel < IToken > [ ] ) : IChannel < IRangeToken > {
189
101
190
102
if ( ch . length === 0 ) {
191
103
throw new Error ( `Illegal Arguments, no arguments given` )
192
104
}
193
- const foundErrMod = ch . find ( fch => fch . mod !== ch [ 0 ] . mod )
194
- if ( foundErrMod ) {
195
- throw new Error ( `Channels dont come from the same module` )
196
- }
197
105
// merge and sort all the tokens from the channels
198
106
const tokens : IRangeToken [ ] = [ ]
199
- const raw = ch [ 0 ] . mod . raw
200
107
const rc : IChannel < IRangeToken > = {
201
- mod : ch [ 0 ] . mod ,
202
108
tokens,
203
109
name,
204
110
process ( ) {
@@ -236,108 +142,6 @@ export function createChannelExcluding(name: string, ...ch: IChannel<IToken>[]):
236
142
}
237
143
}
238
144
}
239
- ch [ 0 ] . mod . channels . set ( name , rc )
240
145
return rc
241
146
}
242
147
243
/**
 * Derives a whitespace ('ws') channel from the module's 'source' channel:
 * each source-range snippet is run through the line-continuation and
 * whitespace processors, and the matched regions are collected as range
 * tokens (sorted by start offset). Registers the channel as 'ws'.
 *
 * NOTE(review): the lookup is for 'source' but the error message says
 * "comments" — one of the two is stale; confirm intended channel name.
 *
 * @throws TypeError when the module has no 'source' channel.
 */
export function createWSChannel(ch: IChannel<IRangeToken>): IChannel<IRangeToken> {

    const source = ch.mod.channels.get('source') as IChannel<IRangeToken>
    if (source === undefined) {
        throw new TypeError(`source "comments" channel is not registered with a module`)
    }
    const raw = ch.mod.raw
    // two-stage pipeline: strip line continuations, then match whitespace runs
    const pipeLine = chain(processLineContinuation, processWS)
    const tokens: IRangeToken[] = []
    const ws: IChannel<IRangeToken> = {
        mod: ch.mod,
        tokens, //vtokens
        name: 'ws',
        process() {
            tokens.splice(0) // reset previous results
            const tok = source.tokens.slice(0) //copy
            for (const processed of pipeLine(createSnippetsUsingTokens(raw, tok))) {
                tokens.push(processed.token)
            }
            // keep whitespace tokens ordered by start offset
            tokens.sort((t1, t2) => t1.f - t2.f)
        }
    }
    ch.mod.channels.set(ws.name, ws)
    return ws
}
268
-
269
- export function createProcessor ( matcher : TestFunc ) {
270
-
271
- return function * processor ( s : Snippet ) : IterableIterator < Processed > {
272
- const { line, f, t } = s
273
- const found = matcher ( line )
274
- if ( found ) {
275
- const first = line . slice ( 0 , found . index )
276
- const second = line . slice ( found . index + found [ 0 ] . length )
277
- yield {
278
- snippet : { line : first , f, t : f + first . length - 1 } ,
279
- token : { f : f + found . index , t : f + found . index + found [ 0 ] . length - 1 }
280
- }
281
- if ( second ) {
282
- yield * processor ( { line : second , f : f + found . index + found [ 0 ] . length , t } )
283
- }
284
- }
285
- }
286
- }
287
-
288
/**
 * Cuts `raw` into snippets using a list of tokens. A range token yields the
 * text it covers directly; a simple token acts as a SLICER, yielding the text
 * between the previous cut point and the token's offset. After the loop, any
 * remainder past the last simple token is emitted as a final snippet.
 * Yields nothing for blank/empty input.
 *
 * NOTE(review): order-sensitive — `prev` threads the cut position across
 * iterations; tokens are presumably pre-sorted by the caller. Confirm.
 *
 * @throws Error when a token is neither a SimpleToken nor a RangeToken.
 */
function* createSnippetsUsingTokens(raw: string, tokens: (ISimpleToken | IRangeToken)[]): IterableIterator<Snippet> {
    if (!(raw || '').trim()) {
        return // nothing to slice
    }
    let prev = 0

    for (const token of tokens) {
        if (isRangeToken(token)) { // range token
            const { f, t } = <IRangeToken>token
            yield { line: raw.slice(f, t + 1), f, t }
            prev = t + 1
        }
        else if (isSimpleToken(token)) { //simpletoken
            // slicer: emit the span since the previous cut, excluding offset f
            const { f } = <ISimpleToken>token
            yield { line: raw.slice(prev, f), f: prev, t: f - 1 }
            prev = f + 1

        }
        else {
            throw new Error(`token is not a SimpleToken or a RangeToken, i.e: [${JSON.stringify(token)}]`)
        }
    }
    const lastToken = last(tokens)
    if (
        isSimpleToken(lastToken) //slicer token
        || lastToken === undefined //source code has only one-liner?
    ) {
        // NOTE(review): `||` falls back to 0 when lastToken.f is 0 as well —
        // harmless here since both branches start the remainder at offset 1
        const f = lastToken && lastToken.f || 0
        if (raw.length - 1 > f) {
            // trailing remainder after the last slicer token
            yield { line: raw.slice(f + 1, raw.length), f: f + 1, t: raw.length - 1 }
        }
    }
}
321
-
322
-
323
- export function compose < T , K > ( convert : ( a : K ) => T ) {
324
-
325
- return function chain ( ...transformers : ( ( s : T ) => IterableIterator < K > ) [ ] ) {
326
-
327
- function * stream ( data : T , ...fns : ( ( s : T ) => IterableIterator < K > ) [ ] ) {
328
- const [ fn , ...others ] = fns
329
- for ( const elt of fn ( data ) ) {
330
- yield elt
331
- if ( others . length ) {
332
- yield * stream ( convert ( elt ) , ...others )
333
- }
334
- }
335
- }
336
-
337
- return function * activate ( gen : IterableIterator < T > ) : IterableIterator < K > {
338
- for ( const elt of gen ) {
339
- yield * stream ( elt , ...transformers )
340
- }
341
- }
342
- }
343
- }
0 commit comments