Commit dc94e9c

cleaning up code
1 parent a41ddc7 commit dc94e9c

9 files changed: +364 -363 lines changed

src/IToken.ts (+5 -11)
@@ -10,25 +10,19 @@ export interface INameToken extends IRangeToken {
   name: string; // name of token
 }
 
-export interface ILines {
-  _module: IModule; // reference, not a copy
-  _EOLMarkers: ISimpleToken[];
-  _vEOLMarkers: ISimpleToken[];
-  lines(): string[];
-  line(n: number): string;
-  vlines(): string[];
-  vline(n: number): string;
+export interface Snippet extends IRangeToken {
+  line: string;
 }
 
 export const isRangeToken = propsExist('f', 't')
 export const isSimpleToken = propsExist('f')
 
-export type IToken = ISimpleToken|IRangeToken
+export type IToken = ISimpleToken & IRangeToken
 
-export function sortTokenFAscTDesc(t1:IToken, t2:IToken){
+export function sortTokenFAscTDesc<T>(t1: any, t2: any) {
   if (t1.f > t2.f) return 1
   if (t1.f < t2.f) return -1
-  if (isRangeToken(t1) && isRangeToken(t2)){
+  if (isRangeToken(t1) && isRangeToken(t2)) {
     const t1t = <IRangeToken>t1
     const t2t = <IRangeToken>t2
     if (t1t < t2t) return 1
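
The net effect in IToken.ts: Snippet now lives next to the other token shapes and carries its text alongside an f/t range, IToken becomes an intersection type, and sortTokenFAscTDesc still orders tokens ascending on their start offset. A minimal sketch of how the exported shapes fit together; the literal values below are made up for illustration:

import { IRangeToken, ISimpleToken, Snippet, sortTokenFAscTDesc } from './IToken'

// A Snippet is now a range token that also carries the text it covers.
const snip: Snippet = { line: '* a comment', f: 0, t: 10 }

// Tokens sort ascending on their start offset `f`.
const tokens: (ISimpleToken | IRangeToken)[] = [
  { f: 40, t: 44 },
  { f: 12 },
  { f: 25, t: 30 }
]
tokens.sort(sortTokenFAscTDesc)
// -> [ { f: 12 }, { f: 25, t: 30 }, { f: 40, t: 44 } ]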

src/channel.ts (+55 -251)
@@ -1,143 +1,86 @@
 import * as debug from 'debug'
 
-import { ISimpleToken, IRangeToken, sortTokenFAscTDesc, IToken, isRangeToken, isSimpleToken } from './IToken';
-import { IModule, IModuleEnums } from './module'
-import { IMatcher, IMatcherState, createClassMatcher } from './matchers'
-import { ITokenEmitter } from './tokenProducers'
-import { isContinue, binarySearch, last, propsExist } from './helpers'
-import { ws, TestFunc } from './classes'
-import { createTokenEmitter, rangeProducer } from './tokenProducers'
-import { Stream } from 'stream';
+import {
+  ISimpleToken,
+  IRangeToken,
+  sortTokenFAscTDesc,
+  IToken,
+  isRangeToken
+} from './IToken';
+
+
+import { last } from './helpers'
+import {
+  chain,
+  Processed,
+  createSnippetsUsingTokens
+} from './processors'
 
 const printer = debug('IChannel')
 
-export interface linePos {
-  ln: number;
-  col: number;
+export interface IChannel<T extends ISimpleToken> {
+  name: string;
+  tokens: T[];
+  process();
 }
-export interface Snippet {
-  line: string;
-  f: number;
-  t: number;
+export interface ISpring<T> {
+  slicers: T[],
+  data: string
 }
 
-export interface Processed {
-  snippet: Snippet;
-  token: IRangeToken;
+export function tokenAsSimple(res: Processed): ISimpleToken {
+  return { f: res.token.f }
 }
 
-function compare(a: any, b: any): 0 | -1 | 1 {
-  const df = a.f - b.f
-  if (df > 0) return 1
-  if (df < 0) return -1
-  if (a.t !== undefined && b.t !== undefined) {
-    const dt = a.t - b.t
-    if (dt > 0) return 1
-    if (dt < 0) return -1
-  }
-  return 0
-}
-
-const search = binarySearch(compare)
-const wsMatcher = createClassMatcher(ws, '>1')
-const wsEmitter = createTokenEmitter(rangeProducer, wsMatcher)
-
-
-const regexp = (s: RegExp) => line => line.match(s)
-const isComment = line => {
-
-  if ('*Cc'.includes(line[0])) {
-    const found: RegExpMatchArray = [line]
-    found.index = 0
-    found.input = line
-    return found
-  }
-  return null
+export function tokenAsRange(res: Processed): IRangeToken {
+  return { f: res.token.f, t: res.token.t }
 }
 
-const isNotComment = line => {
-  if (!isComment(line)) {
-    const found: RegExpMatchArray = [line]
-    found.index = 0
-    found.input = line
-    return found
-  }
-  return null
-}
-
-const chain = compose((a: Processed) => a.snippet)
-
-export const processLineContinuation = createProcessor(regexp(/\n\s{5}[^\s]/))
-export const processNonComments = createProcessor(isNotComment)
-export const processWS = createProcessor(regexp(/[\s\t]+/))
-export const processComments = createProcessor(isComment)
-
-
-
-export interface IChannel<T extends ISimpleToken> {
-  mod: IModule;
-  name: string;
-  tokens: T[];
-  process();
-}
-
-export function createChannel<T extends ISimpleToken>(name: string) {
-  return function addTokenMatchers(...te: (ITokenEmitter<T>)[]) {
-    if (te.length === 0) {
-      throw new Error(`No token emitter functions specified`)
-    }
-    return function setModule(module: IModule): IChannel<T> {
-      //module,
-      const channel = {
-        mod: module,
-        tokens: [],
-        name,
-        process() {
-          if (!module.state.has(IModuleEnums.loaded)) {
-            throw new Error(`Module [${module.name}] not loaded`)
-          }
-          const raw = module.raw
-          if (module.raw.length === 0) {
-            printer(`module [${module.name}] loaded an empty file`)
-            return
-          }
-          this.tokens = [] //clear
-          for (let i = 0; i < raw.length; i++) {
-            te.forEach(fn => {
-              const token = fn(raw[i], i)
-              if (token) {
-                this.tokens.push(token)
-              }
-            })
+export function createChannel<T extends ISimpleToken, K extends ISimpleToken>(name: string, convert: (res: Processed) => T) {
+  return function slicer(pipe: (Snippet) => IterableIterator<Processed>) {
+    return function (spring?: () => ISpring<K>) {
+      return function setModule(raw: string): IChannel<T> {
+
+        const tokens: T[] = []
+        const channel = {
+          tokens,
+          name,
+          process() {
+            tokens.splice(0)
+            if (raw.length === 0) {
+              printer(`channel [${name}] processed an empty file`)
+              return
+            }
+            const { data, slicers } = spring ? spring() : { data: raw, slicers: [{ f: raw.length }] }
+            for (const processed of pipe(createSnippetsUsingTokens(data, slicers))) {
+              //console.log(processed)
+              tokens.push(convert(processed))
+            }
+            tokens.sort(sortTokenFAscTDesc)
           }
         }
+        return channel
       }
-      module.channels.set(name, channel)
-      return channel
     }
   }
 }
 
-export function createLogicalEOLChannel<T extends ISimpleToken>(ch: IChannel<T>): IChannel<T> {
 
-  if (ch !== ch.mod.channels.get('lf')) {
-    throw new TypeError(`source "lf" channel is not registered with a module`)
-  }
-  let tokens: T[] = []
-  const vCh: IChannel<T> = {
-    mod: ch.mod,
+export function createLogicalEOLChannel(ch: IChannel<ISimpleToken>, raw:string): IChannel<ISimpleToken> {
+
+  let tokens: ISimpleToken[] = []
+  const vCh: IChannel<ISimpleToken> = {
     tokens,
     name: 'vlf',
     process() {
       tokens.splice(0)
       const lftok = ch.tokens.slice(0)
-      const raw = ch.mod.raw
       let prev = 0
       for (let i = 0; i < lftok.length; i++) {
         const pos = ch.tokens[i].f
         const line = raw.slice(prev, pos)
         prev = pos + 1
-        if (isContinue(line)) {
+        if (line.match(/^\s{5}[^\s]/)) {
          if (i === 0) {
            const err = `first line cannot be continuation: [${line}]`
            printer(err)
@@ -147,58 +90,21 @@ export function createLogicalEOLChannel<T extends ISimpleToken>(ch: IChannel<T>)
           tokens[tokens.length - 1] = lftok[i]
           continue
         }
-        tokens.push(lftok[i])
+        tokens.push({ f: lftok[i].f })
       }
     }
   }
-  ch.mod.channels.set(vCh.name, vCh)
   return vCh
 }
 
-export function createCommentsChannel(ch: IChannel<ISimpleToken>): IChannel<IRangeToken> {
-
-  const vlf = ch.mod.channels.get('vlf')
-  const lf = ch.mod.channels.get('lf')
-
-  if (ch !== vlf && ch !== lf) {
-    throw new TypeError(`source "lf/vlf" channel is not registered with a module`)
-  }
-  const _lf = vlf || lf
-  const tokens: IRangeToken[] = []
-  const raw = _lf.mod.raw
-
-  const pipeLine = chain(processComments)
-
-  const comm: IChannel<IRangeToken> = {
-    mod: ch.mod,
-    tokens,
-    name: 'comments',
-    process() {
-      tokens.splice(0)
-      const lftok = _lf.tokens.slice(0) //copy
-      for (const processed of pipeLine(createSnippetsUsingTokens(raw, lftok))) {
-        tokens.push(processed.token)
-      }
-    }
-  }
-  ch.mod.channels.set(comm.name, comm)
-  return comm
-}
-
-export function createChannelExcluding(name: string, ...ch: IChannel<IToken>[]): IChannel<IRangeToken> {
+export function createChannelExcluding(name: string, raw: string, ...ch: IChannel<IToken>[]): IChannel<IRangeToken> {
 
   if (ch.length === 0) {
     throw new Error(`Illegal Arguments, no arguments given`)
   }
-  const foundErrMod = ch.find(fch => fch.mod !== ch[0].mod)
-  if (foundErrMod) {
-    throw new Error(`Channels dont come from the same module`)
-  }
   // merge and sort all the tokens from the channels
   const tokens: IRangeToken[] = []
-  const raw = ch[0].mod.raw
   const rc: IChannel<IRangeToken> = {
-    mod: ch[0].mod,
     tokens,
     name,
     process() {
@@ -236,108 +142,6 @@ export function createChannelExcluding(name: string, ...ch: IChannel<IToken>[]):
       }
     }
   }
-  ch[0].mod.channels.set(name, rc)
   return rc
 }
 
-export function createWSChannel(ch: IChannel<IRangeToken>): IChannel<IRangeToken> {
-
-  const source = ch.mod.channels.get('source') as IChannel<IRangeToken>
-  if (source === undefined) {
-    throw new TypeError(`source "comments" channel is not registered with a module`)
-  }
-  const raw = ch.mod.raw
-  const pipeLine = chain(processLineContinuation, processWS)
-  const tokens: IRangeToken[] = []
-  const ws: IChannel<IRangeToken> = {
-    mod: ch.mod,
-    tokens, //vtokens
-    name: 'ws',
-    process() {
-      tokens.splice(0)
-      const tok = source.tokens.slice(0) //copy
-      for (const processed of pipeLine(createSnippetsUsingTokens(raw, tok))) {
-        tokens.push(processed.token)
-      }
-      tokens.sort((t1, t2) => t1.f - t2.f)
-    }
-  }
-  ch.mod.channels.set(ws.name, ws)
-  return ws
-}
-
-export function createProcessor(matcher: TestFunc) {
-
-  return function* processor(s: Snippet): IterableIterator<Processed> {
-    const { line, f, t } = s
-    const found = matcher(line)
-    if (found) {
-      const first = line.slice(0, found.index)
-      const second = line.slice(found.index + found[0].length)
-      yield {
-        snippet: { line: first, f, t: f + first.length - 1 },
-        token: { f: f + found.index, t: f + found.index + found[0].length - 1 }
-      }
-      if (second) {
-        yield* processor({ line: second, f: f + found.index + found[0].length, t })
-      }
-    }
-  }
-}
-
-function* createSnippetsUsingTokens(raw: string, tokens: (ISimpleToken | IRangeToken)[]): IterableIterator<Snippet> {
-  if (!(raw || '').trim()) {
-    return
-  }
-  let prev = 0
-
-  for (const token of tokens) {
-    if (isRangeToken(token)) {// range token
-      const { f, t } = <IRangeToken>token
-      yield { line: raw.slice(f, t + 1), f, t }
-      prev = t + 1
-    }
-    else if (isSimpleToken(token)) {//simpletoken
-      const { f } = <ISimpleToken>token
-      yield { line: raw.slice(prev, f), f: prev, t: f - 1 }
-      prev = f + 1
-
-    }
-    else {
-      throw new Error(`token is not a SimpleToken or a RangeToken, i.e: [${JSON.stringify(token)}]`)
-    }
-  }
-  const lastToken = last(tokens)
-  if (
-    isSimpleToken(lastToken) //slicer token
-    || lastToken === undefined //source code has only one-liner?
-  ) {
-    const f = lastToken && lastToken.f || 0
-    if (raw.length - 1 > f) {
-      yield { line: raw.slice(f + 1, raw.length), f: f + 1, t: raw.length - 1 }
-    }
-  }
-}
-
-
-export function compose<T, K>(convert: (a: K) => T) {
-
-  return function chain(...transformers: ((s: T) => IterableIterator<K>)[]) {
-
-    function* stream(data: T, ...fns: ((s: T) => IterableIterator<K>)[]) {
-      const [fn, ...others] = fns
-      for (const elt of fn(data)) {
-        yield elt
-        if (others.length) {
-          yield* stream(convert(elt), ...others)
-        }
-      }
-    }
-
-    return function* activate(gen: IterableIterator<T>): IterableIterator<K> {
-      for (const elt of gen) {
-        yield* stream(elt, ...transformers)
-      }
-    }
-  }
-}
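
The reshaped createChannel is a curried builder: channel name plus a Processed-to-token converter first, then a processor pipe, then an optional spring that supplies the data and slicer tokens, and finally the raw text itself. A hedged usage sketch follows; it assumes chain exported from './processors' composes snippet processors the way the removed compose/chain code here did, and the wsProcessor and input text are made up for illustration:

import { createChannel, tokenAsRange } from './channel'
import { chain, Processed } from './processors'
import { Snippet, IRangeToken, ISimpleToken } from './IToken'

// Illustrative processor: yields a Processed for the first whitespace run in a
// snippet (the removed createProcessor also recursed over the remainder; omitted here).
function* wsProcessor(s: Snippet): IterableIterator<Processed> {
  const found = s.line.match(/\s+/)
  if (found && found.index !== undefined) {
    yield {
      snippet: { line: s.line.slice(0, found.index), f: s.f, t: s.f + found.index - 1 },
      token: { f: s.f + found.index, t: s.f + found.index + found[0].length - 1 }
    }
  }
}

const raw = 'MOVE A TO B'

// No spring given, so process() falls back to a single slicer covering all of raw.
const ws = createChannel<IRangeToken, ISimpleToken>('ws', tokenAsRange)(chain(wsProcessor))()(raw)
ws.process()
console.log(ws.tokens) // e.g. [ { f: 4, t: 4 } ], the first whitespace run in raw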
