Skip to content

Commit 76ad2f4

Browse files
feat: add FluxRecord.row with response data stored in Array (#118)
1 parent d2e7c44 commit 76ad2f4

File tree

6 files changed

+81
-5
lines changed

6 files changed

+81
-5
lines changed

CHANGELOG.md

+3
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,8 @@
11
## 2.8.0 [unreleased]
22

3+
### Features
4+
1. [#118](https://github.com/influxdata/influxdb-client-ruby/pull/118): Added `FluxRecord.row` which stores response data in an array
5+
36
## 2.7.0 [2022-07-29]
47

58
### Features

examples/README.md

+2
Original file line numberDiff line numberDiff line change
@@ -12,3 +12,5 @@
1212

1313
## Others
1414
- [invokable_scripts.rb](invokable_scripts.rb) - How to use Invokable scripts Cloud API to create custom endpoints that query data
15+
- [record_row_example.rb](record_row_example.rb) - How to use `FluxRecord.row` (Array) instead of `FluxRecord.values` (Hash),
16+
in case of duplicated column names

examples/record_row_example.rb

+38
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
require 'influxdb-client'
2+
3+
url = 'http://localhost:8086'
4+
token = 'my-token'
5+
bucket = 'my-bucket'
6+
org = 'my-org'
7+
8+
client = InfluxDB2::Client.new(url,
9+
token,
10+
bucket: bucket,
11+
org: org,
12+
precision: InfluxDB2::WritePrecision::NANOSECOND,
13+
use_ssl: false)
14+
15+
# Prepare Data
16+
write_api = client.create_write_api
17+
(1..5).each do |i|
18+
write_api.write(data: "point,table=my-table result=#{i}", bucket: bucket, org: org)
19+
end
20+
21+
# Query data with pivot
22+
query_api = client.create_query_api
23+
query = "from(bucket: \"#{bucket}\") |> range(start: -1m) |> filter(fn: (r) => (r[\"_measurement\"] == \"point\"))
24+
|> pivot(rowKey:[\"_time\"], columnKey: [\"_field\"], valueColumn: \"_value\")"
25+
result = query_api.query(query: query)
26+
27+
# Write data to output
28+
puts '----------------------------------------------- FluxRecord.values ----------------------------------------------'
29+
result[0].records.each do |record|
30+
puts record.values
31+
end
32+
33+
puts '------------------------------------------------- FluxRecord.row -----------------------------------------------'
34+
result[0].records.each do |record|
35+
puts record.row.join(',')
36+
end
37+
38+
client.close!

lib/influxdb2/client/flux_csv_parser.rb

+11-3
Original file line numberDiff line numberDiff line change
@@ -125,8 +125,8 @@ def _parse_line(csv)
125125
token = csv[0]
126126

127127
# start new table
128-
if ((ANNOTATIONS.include? token) && !@start_new_table) ||
129-
(@response_mode == InfluxDB2::FluxResponseMode::ONLY_NAMES && @table.nil?)
128+
if ((ANNOTATIONS.include? token) && !@start_new_table) || (@response_mode ==
129+
InfluxDB2::FluxResponseMode::ONLY_NAMES && @table.nil?)
130130

131131
# Return already parsed DataFrame
132132
@start_new_table = true
@@ -187,6 +187,12 @@ def _add_column_names_and_tags(table, csv)
187187
column.label = csv[i]
188188
i += 1
189189
end
190+
191+
duplicates = table.columns.group_by(&:label).select { |_k, v| v.size > 1 }
192+
193+
warning = "The response contains columns with duplicated names: #{duplicates.keys.join(', ')}
194+
You should use the 'FluxRecord.row' to access your data instead of 'FluxRecord.values' hash."
195+
puts warning unless duplicates.empty?
190196
end
191197

192198
def _parse_values(csv)
@@ -234,7 +240,9 @@ def _parse_record(table_index, table, csv)
234240
table.columns.each do |flux_column|
235241
column_name = flux_column.label
236242
str_val = csv[flux_column.index + 1]
237-
record.values[column_name] = _to_value(str_val, flux_column)
243+
value = _to_value(str_val, flux_column)
244+
record.values[column_name] = value
245+
record.row.push(value)
238246
end
239247

240248
record

lib/influxdb2/client/flux_table.rb

+7-2
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ def initialize
2626
@columns = []
2727
@records = []
2828
end
29+
2930
attr_reader :columns, :records
3031

3132
# A table's group key is subset of the entire columns dataset that assigned to the table.
@@ -46,11 +47,14 @@ def group_key
4647
class FluxRecord
4748
# @param [Integer] table the index of table which contains the record
4849
# @param [Hash] values tuple of values
49-
def initialize(table, values: nil)
50+
# @param [Array] row record columns
51+
def initialize(table, values: nil, row: nil)
5052
@table = table
5153
@values = values || {}
54+
@row = row || []
5255
end
53-
attr_reader :table, :values
56+
57+
attr_reader :table, :values, :row
5458
attr_writer :table
5559

5660
# @return [Time] the inclusive lower time bound of all records
@@ -93,6 +97,7 @@ def initialize(index: nil, label: nil, data_type: nil, group: nil, default_value
9397
@group = group
9498
@default_value = default_value
9599
end
100+
96101
attr_reader :index, :label, :data_type, :group, :default_value
97102
attr_writer :index, :label, :data_type, :group, :default_value
98103
end

test/influxdb/flux_csv_parser_test.rb

+20
Original file line numberDiff line numberDiff line change
@@ -506,4 +506,24 @@ def test_parse_without_datatype
506506
assert_equal '11', tables[0].records[0].values['value1']
507507
assert_equal 'west', tables[0].records[0].values['region']
508508
end
509+
510+
def test_parse_duplicate_column_names
511+
data = '#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,string,string,double
512+
#group,false,false,true,true,false,true,true,false
513+
#default,_result,,,,,,,
514+
,result,table,_start,_stop,_time,_measurement,location,result
515+
,,0,2022-09-13T06:14:40.469404272Z,2022-09-13T06:24:40.469404272Z,2022-09-13T06:24:33.746Z,my_measurement,Prague,25.3
516+
,,0,2022-09-13T06:14:40.469404272Z,2022-09-13T06:24:40.469404272Z,2022-09-13T06:24:39.299Z,my_measurement,Prague,25.3
517+
,,0,2022-09-13T06:14:40.469404272Z,2022-09-13T06:24:40.469404272Z,2022-09-13T06:24:40.454Z,my_measurement,Prague,25.3'
518+
519+
tables = InfluxDB2::FluxCsvParser.new(data, stream: false, response_mode: InfluxDB2::FluxResponseMode::ONLY_NAMES)
520+
.parse
521+
.tables
522+
assert_equal 1, tables.size
523+
assert_equal 8, tables[0].columns.size
524+
assert_equal 3, tables[0].records.size
525+
assert_equal 7, tables[0].records[0].values.size
526+
assert_equal 8, tables[0].records[0].row.size
527+
assert_equal 25.3, tables[0].records[0].row[7]
528+
end
509529
end

0 commit comments

Comments
 (0)