Commit cfabaa5

docs(site): fix quickstart and update examples
1 parent 1988cc8 commit cfabaa5

5 files changed: +85 −97 lines
Lines changed: 23 additions & 26 deletions

```diff
@@ -1,27 +1,24 @@
 {
-  "config": {
-    "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
-    "filters": "ParseDelimitedRow, SetTopic, SetKey",
-    "filters.SetTopic.value": "replace_all(lowercase($.type), '\\s','-')",
-    "filters.SetTopic.field": "$topic",
-    "filters.SetTopic.type": "io.streamthoughts.kafka.connect.filepulse.filter.AppendFilter",
-    "filters.SetKey.value": "{{ lowercase($.artist) }}-{{ lowercase($.title) }}",
-    "filters.SetKey.field": "$key",
-    "filters.SetKey.type": "io.streamthoughts.kafka.connect.filepulse.filter.AppendFilter",
-    "filters.ParseDelimitedRow.extractColumnName": "headers",
-    "filters.ParseDelimitedRow.trimColumn": "true",
-    "filters.ParseDelimitedRow.type": "io.streamthoughts.kafka.connect.filepulse.filter.DelimitedRowFilter",
-    "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
-    "fs.listing.class": "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
-    "fs.listing.directory.path": "/tmp/kafka-connect/examples/",
-    "fs.listing.interval.ms": "10000",
-    "internal.kafka.reporter.bootstrap.servers": "broker:29092",
-    "internal.kafka.reporter.topic": "connect-file-pulse-status",
-    "offset.attributes.string": "name+hash",
-    "skip.headers": "1",
-    "topic": "connect-file-pulse-quickstart-csv",
-    "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalRowFileInputReader",
-    "tasks.max": 1
-  },
-  "name": "connect-file-pulse-quickstart-csv"
-}
+  "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
+  "filters": "ParseDelimitedRow, SetTopic, SetKey",
+  "filters.SetTopic.value": "replace_all(lowercase($.type), '\\s','-')",
+  "filters.SetTopic.field": "$topic",
+  "filters.SetTopic.type": "io.streamthoughts.kafka.connect.filepulse.filter.AppendFilter",
+  "filters.SetKey.value": "{{ lowercase($.artist) }}-{{ lowercase($.title) }}",
+  "filters.SetKey.field": "$key",
+  "filters.SetKey.type": "io.streamthoughts.kafka.connect.filepulse.filter.AppendFilter",
+  "filters.ParseDelimitedRow.extractColumnName": "headers",
+  "filters.ParseDelimitedRow.trimColumn": "true",
+  "filters.ParseDelimitedRow.type": "io.streamthoughts.kafka.connect.filepulse.filter.DelimitedRowFilter",
+  "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
+  "fs.listing.class": "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
+  "fs.listing.directory.path": "/tmp/kafka-connect/examples/",
+  "fs.listing.interval.ms": "10000",
+  "internal.kafka.reporter.bootstrap.servers": "broker:29092",
+  "internal.kafka.reporter.topic": "connect-file-pulse-status",
+  "offset.attributes.string": "name+hash",
+  "skip.headers": "1",
+  "topic": "connect-file-pulse-quickstart-csv",
+  "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalRowFileInputReader",
+  "tasks.max": 1
+}
```
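This example (and the three that follow) is flattened from the `{"name": ..., "config": {...}}` envelope expected by `POST /connectors` to a bare property map, which matches the switch to `PUT /connectors/<name>/config` in the Getting-started page further down this commit. A minimal sketch of registering such a flattened file, using the `connect-file-pulse-quickstart-csv` name from the doc changes below and assuming the quickstart worker listens on `localhost:8083`:

```bash
# PUT .../config takes the flattened property map directly; unlike POST /connectors,
# it can be re-run to update an existing connector instead of failing on a duplicate name.
curl -sX PUT http://localhost:8083/connectors/connect-file-pulse-quickstart-csv/config \
     -d @connect-file-pulse-quickstart-csv.json \
     --header "Content-Type: application/json" | jq
```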
Lines changed: 12 additions & 15 deletions

```diff
@@ -1,17 +1,14 @@
 {
-  "config": {
-    "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
-    "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
-    "fs.listing.class" : "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
-    "fs.listing.directory.path":"/tmp/kafka-connect/examples/",
-    "fs.listing.interval.ms": "10000",
-    "internal.kafka.reporter.bootstrap.servers": "broker:29092",
-    "internal.kafka.reporter.topic": "connect-file-pulse-status",
-    "offset.attributes.string": "name",
-    "read.max.wait.ms": "5000",
-    "topic": "connect-file-pulse-quickstart-avro",
-    "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalAvroFileInputReader",
-    "tasks.max": 1
-  },
-  "name": "connect-file-pulse-quickstart-avro"
+  "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
+  "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
+  "fs.listing.class" : "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
+  "fs.listing.directory.path":"/tmp/kafka-connect/examples/",
+  "fs.listing.interval.ms": "10000",
+  "internal.kafka.reporter.bootstrap.servers": "broker:29092",
+  "internal.kafka.reporter.topic": "connect-file-pulse-status",
+  "offset.attributes.string": "name",
+  "read.max.wait.ms": "5000",
+  "topic": "connect-file-pulse-quickstart-avro",
+  "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalAvroFileInputReader",
+  "tasks.max": 1
 }
```
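The Avro example keeps `/tmp/kafka-connect/examples/` as its listing directory, so feeding it data follows the same copy-into-container pattern as the CSV dataset step in the doc changes below. A sketch, assuming the quickstart `connect` container and using `musics-dataset.avro` as a hypothetical placeholder file name:

```bash
# The file name is a placeholder; any Avro container file dropped into the listing
# directory is picked up by LocalAvroFileInputReader on the next scan
# (fs.listing.interval.ms = 10000, i.e. every 10 seconds).
docker exec -it connect mkdir -p /tmp/kafka-connect/examples
docker cp musics-dataset.avro connect:/tmp/kafka-connect/examples/musics-dataset.avro
```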
Lines changed: 22 additions & 25 deletions

```diff
@@ -1,26 +1,23 @@
 {
-  "config": {
-    "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
-    "filters": "ParseDelimitedRow, Drop",
-    "filters.Drop.if": "{{ equals($value.artist, 'U2') }}",
-    "filters.Drop.invert": "true",
-    "filters.Drop.type": "io.streamthoughts.kafka.connect.filepulse.filter.DropFilter",
-    "filters.ParseDelimitedRow.extractColumnName": "headers",
-    "filters.ParseDelimitedRow.trimColumn": "true",
-    "filters.ParseDelimitedRow.type": "io.streamthoughts.kafka.connect.filepulse.filter.DelimitedRowFilter",
-    "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
-    "fs.listing.class": "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
-    "fs.listing.directory.path":"/tmp/kafka-connect/examples/",
-    "fs.listing.filters":"io.streamthoughts.kafka.connect.filepulse.fs.filter.RegexFileListFilter",
-    "fs.listing.interval.ms": "10000",
-    "file.filter.regex.pattern":".*\\.csv$",
-    "internal.kafka.reporter.bootstrap.servers": "broker:29092",
-    "internal.kafka.reporter.topic": "connect-file-pulse-status",
-    "offset.attributes.string": "name+hash",
-    "skip.headers": "1",
-    "topic": "connect-file-pulse-quickstart-csv",
-    "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalRowFileInputReader",
-    "tasks.max": 1
-  },
-  "name": "connect-file-pulse-quickstart-csv"
-}
+  "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
+  "filters": "ParseDelimitedRow, Drop",
+  "filters.Drop.if": "{{ equals($value.artist, 'U2') }}",
+  "filters.Drop.invert": "true",
+  "filters.Drop.type": "io.streamthoughts.kafka.connect.filepulse.filter.DropFilter",
+  "filters.ParseDelimitedRow.extractColumnName": "headers",
+  "filters.ParseDelimitedRow.trimColumn": "true",
+  "filters.ParseDelimitedRow.type": "io.streamthoughts.kafka.connect.filepulse.filter.DelimitedRowFilter",
+  "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
+  "fs.listing.class": "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
+  "fs.listing.directory.path":"/tmp/kafka-connect/examples/",
+  "fs.listing.filters":"io.streamthoughts.kafka.connect.filepulse.fs.filter.RegexFileListFilter",
+  "fs.listing.interval.ms": "10000",
+  "file.filter.regex.pattern":".*\\.csv$",
+  "internal.kafka.reporter.bootstrap.servers": "broker:29092",
+  "internal.kafka.reporter.topic": "connect-file-pulse-status",
+  "offset.attributes.string": "name+hash",
+  "skip.headers": "1",
+  "topic": "connect-file-pulse-quickstart-csv",
+  "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalRowFileInputReader",
+  "tasks.max": 1
+}
```
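Whichever of these examples is registered, the worker's status endpoint is the quickest way to confirm that the connector and its single task (`tasks.max` is `1`) came up. A minimal check, again assuming `localhost:8083` and the `connect-file-pulse-quickstart-csv` name used in the docs:

```bash
# Both the connector and its task entry should report state "RUNNING"
# once the directory scan has started.
curl -s http://localhost:8083/connectors/connect-file-pulse-quickstart-csv/status | jq
```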
Lines changed: 23 additions & 26 deletions

```diff
@@ -1,28 +1,25 @@
 {
-  "config": {
-    "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
-    "filters": "GroupMultilineException, ParseLog4jLog",
-    "filters.GroupMultilineException.negate": "false",
-    "filters.GroupMultilineException.pattern": "^[\\t]",
-    "filters.GroupMultilineException.type": "io.streamthoughts.kafka.connect.filepulse.filter.MultiRowFilter",
-    "filters.ParseLog4jLog.match": "%{TIMESTAMP_ISO8601:logdate} %{LOGLEVEL:loglevel} %{GREEDYDATA:message}",
-    "filters.ParseLog4jLog.overwrite": "message",
-    "filters.ParseLog4jLog.source": "message",
-    "filters.ParseLog4jLog.type": "io.streamthoughts.kafka.connect.filepulse.filter.GrokFilter",
-    "filters.ParseLog4jLog.ignoreFailure": "true",
-    "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
-    "fs.listing.class": "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
-    "fs.listing.directory.path": "/tmp/kafka-connect/examples/",
-    "fs.listing.filters": "io.streamthoughts.kafka.connect.filepulse.fs.filter.RegexFileListFilter",
-    "fs.listing.interval.ms": "10000",
-    "file.filter.regex.pattern":".*\\.log$",
-    "internal.kafka.reporter.bootstrap.servers": "broker:29092",
-    "internal.kafka.reporter.topic": "connect-file-pulse-status",
-    "offset.attributes.string": "name",
-    "read.max.wait.ms": "5000",
-    "topic": "connect-file-pulse-quickstart-log4j",
-    "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalRowFileInputReader",
-    "tasks.max": 1
-  },
-  "name": "connect-file-pulse-quickstart-log4j"
+  "connector.class": "io.streamthoughts.kafka.connect.filepulse.source.FilePulseSourceConnector",
+  "filters": "GroupMultilineException, ParseLog4jLog",
+  "filters.GroupMultilineException.negate": "false",
+  "filters.GroupMultilineException.pattern": "^[\\t]",
+  "filters.GroupMultilineException.type": "io.streamthoughts.kafka.connect.filepulse.filter.MultiRowFilter",
+  "filters.ParseLog4jLog.pattern": "%{TIMESTAMP_ISO8601:logdate} %{LOGLEVEL:loglevel} %{GREEDYDATA:message}",
+  "filters.ParseLog4jLog.overwrite": "message",
+  "filters.ParseLog4jLog.source": "message",
+  "filters.ParseLog4jLog.type": "io.streamthoughts.kafka.connect.filepulse.filter.GrokFilter",
+  "filters.ParseLog4jLog.ignoreFailure": "true",
+  "fs.cleanup.policy.class": "io.streamthoughts.kafka.connect.filepulse.clean.LogCleanupPolicy",
+  "fs.listing.class": "io.streamthoughts.kafka.connect.filepulse.fs.LocalFSDirectoryListing",
+  "fs.listing.directory.path": "/var/log/kafka/",
+  "fs.listing.filters": "io.streamthoughts.kafka.connect.filepulse.fs.filter.RegexFileListFilter",
+  "fs.listing.interval.ms": "10000",
+  "file.filter.regex.pattern":".*\\.log$",
+  "internal.kafka.reporter.bootstrap.servers": "broker:29092",
+  "internal.kafka.reporter.topic": "connect-file-pulse-status",
+  "offset.attributes.string": "name",
+  "read.max.wait.ms": "5000",
+  "topic": "connect-file-pulse-quickstart-log4j",
+  "tasks.reader.class": "io.streamthoughts.kafka.connect.filepulse.fs.reader.LocalRowFileInputReader",
+  "tasks.max": 1
 }
```
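Besides flattening, this hunk renames the Grok property `filters.ParseLog4jLog.match` to `filters.ParseLog4jLog.pattern` and points the listing at `/var/log/kafka/`. A property rename like this is easy to miss, so the config can be run through Kafka Connect's validation endpoint before (re)deploying; a sketch, assuming the FilePulse plugin is installed on the worker at `localhost:8083`:

```bash
# Validate the flattened config against the connector's ConfigDef;
# a non-zero error_count points at missing or invalid properties.
curl -sX PUT http://localhost:8083/connector-plugins/FilePulseSourceConnector/config/validate \
     -d @connect-file-pulse-quickstart-log4j.json \
     --header "Content-Type: application/json" | jq '.error_count'
```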

site/content/en/docs/Getting started/_index.md

Lines changed: 5 additions & 5 deletions

````diff
@@ -48,9 +48,9 @@ This example starts a new connector instance to parse the Kafka Connect containe
 **1 ) Start a new connector instance**
 
 ```bash
-$ curl -sSL $GITHUB_REPO_MASTER/config/connect-file-pulse-quickstart-log4j.json -o connect-file-pulse-quickstart-log4j.json
+$ curl -sSL $GITHUB_REPO_MASTER/examples/connect-file-pulse-quickstart-log4j.json -o connect-file-pulse-quickstart-log4j.json
 
-$ curl -sX POST http://localhost:8083/connectors \
+$ curl -sX PUT http://localhost:8083/connectors/connect-file-pulse-quickstart-log4j/config \
 -d @connect-file-pulse-quickstart-log4j.json \
 --header "Content-Type: application/json" | jq
 ```
@@ -110,17 +110,17 @@ This example starts a new connector instance that parse a CSV file and filter ro
 **1 ) Start a new connector instance**
 
 ```bash
-$ curl -sSL $GITHUB_REPO_MASTER/config/connect-file-pulse-quickstart-csv.json -o connect-file-pulse-quickstart-csv.json
+$ curl -sSL $GITHUB_REPO_MASTER/examples/connect-file-pulse-quickstart-csv.json -o connect-file-pulse-quickstart-csv.json
 
-$ curl -sX POST http://localhost:8083/connectors \
+$ curl -sX PUT http://localhost:8083/connectors/connect-file-pulse-quickstart-csv/config \
 -d @connect-file-pulse-quickstart-csv.json \
 --header "Content-Type: application/json" | jq
 ```
 
 **2 ) Copy example csv file into container**
 
 ```bash
-$ curl -sSL $GITHUB_REPO_MASTER/examples/quickstart-musics-dataset.csv -o quickstart-musics-dataset.csv
+$ curl -sSL $GITHUB_REPO_MASTER/datasets/quickstart-musics-dataset.csv -o quickstart-musics-dataset.csv
 $ docker exec -it connect mkdir -p /tmp/kafka-connect/examples
 $ docker cp quickstart-musics-dataset.csv connect://tmp/kafka-connect/examples/quickstart-musics-dataset.csv
 ```
````
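After walking through the updated quickstart steps, the example instances can be removed through the same REST interface; a sketch, assuming the connector names used above:

```bash
# Deleting a connector stops its tasks; the produced topics and the copied
# example files are left in place.
curl -sX DELETE http://localhost:8083/connectors/connect-file-pulse-quickstart-log4j
curl -sX DELETE http://localhost:8083/connectors/connect-file-pulse-quickstart-csv
```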
