# Rake task: walk every benchmark result file and upsert it into Postgres.
# Expects result files at <language>/<framework>/.results/<concurrency>/<run>.json
# and a DATABASE_URL env var pointing at the target database.
task :collect do
  database = ENV.fetch('DATABASE_URL')
  db = PG.connect(database)
  begin
    Dir.glob('*/*/.results/*/**.json').each do |file|
      # Path layout encodes the dimensions: language/framework/.results/concurrency/...
      info = file.split('/')
      language = info[0]
      framework = info[1]
      concurrency = info[3]

      # Upsert pattern: the no-op DO UPDATE makes RETURNING id work even on conflict.
      res = db.query(
        'INSERT INTO languages (label) VALUES ($1) ON CONFLICT (label) DO UPDATE SET label = $1 RETURNING id', [language]
      )
      language_id = res.first['id']

      res = db.query(
        'INSERT INTO frameworks (language_id, label) VALUES ($1, $2) ON CONFLICT (language_id, label) DO UPDATE SET label = $2 RETURNING id', [
          language_id, framework
        ]
      )
      framework_id = res.first['id']

      # JSON is a strict subset of YAML, so YAML.load_file parses the .json file.
      data = YAML.load_file(file, symbolize_names: true)

      res = db.query(
        'INSERT INTO concurrencies (level) VALUES ($1) ON CONFLICT (level) DO UPDATE SET level = $1 RETURNING id', [concurrency]
      )
      concurrency_level_id = res.first['id']

      # -1 is the sentinel for metrics this result format does not provide.
      results = {
        duration_ms: data.dig(:summary, :total) * 1000,
        total_requests: -1,
        total_requests_per_s: data.dig(:summary, :requestsPerSec),
        total_bytes_received: data.dig(:summary, :totalData),
        socket_connection_errors: -1,
        socket_read_errors: -1,
        socket_write_errors: -1,
        http_errors: -1,
        request_timeouts: -1,
        minimum_latency: -1,
        average_latency: -1,
        standard_deviation: -1,
        percentile50: data.dig(:latencyPercentiles, :p50),
        percentile75: data.dig(:latencyPercentiles, :p75),
        percentile90: data.dig(:latencyPercentiles, :p90),
        # FIX: previously read :p95 here, storing the 95th percentile under the
        # percentile99 key. Read :p99, falling back to the -1 sentinel when absent.
        percentile99: data.dig(:latencyPercentiles, :p99) || -1,
        percentile99999: -1
      }
      results.each do |key, value|
        insert(db, framework_id, key, value, concurrency_level_id)
      end
    end
  ensure
    # FIX: close the connection even when a result file or query raises,
    # instead of leaking it on error.
    db.close
  end
end