{
  "connector.class" : "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
  "filters" : "GroupMultilineException, ParseLog4jLog",
  "filters.GroupMultilineException.negate" : "false",
  "filters.GroupMultilineException.pattern" : "^[\\t]",
  "filters.GroupMultilineException.type" : "io.streamthoughts.kafka.connect.filepulse.filter.MultiRowFilter",
  "filters.ParseLog4jLog.pattern" : "%{TIMESTAMP_ISO8601:logdate} %{LOGLEVEL:loglevel} %{GREEDYDATA:message}",
  "filters.ParseLog4jLog.overwrite" : "message",
  "filters.ParseLog4jLog.source" : "message",
  "filters.ParseLog4jLog.type" : "io.streamthoughts.kafka.connect.filepulse.filter.GrokFilter",
  "filters.ParseLog4jLog.ignoreFailure" : "true",
  "fs.cleanup.policy.class" : "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
  "fs.listing.class" : "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
  "fs.listing.directory.path" : "/var/log/kafka/",
  "fs.listing.filters" : "io.streamthoughts.kafka.connect.filepulse.fs.filter.RegexFileListFilter",
  "fs.listing.interval.ms" : "10000",
  "file.filter.regex.pattern" : ".*\\.log$",
  "internal.kafka.reporter.bootstrap.servers" : "broker:29092",
  "internal.kafka.reporter.topic" : "connect-file-pulse-status",
  "offset.attributes.string" : "name",
  "read.max.wait.ms" : "5000",
  "topic" : "connect-file-pulse-quickstart-log4j",
  "tasks.reader.class" : "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalRowFileInputReader",
  "tasks.max" : 1
}