Fix analytics tests and fix analytics APIs for no indices.
- Get all of the analytics-related tests working again in this branch of
  merged upgrades.
- We had added some tests for the behavior of analytics API queries
  against date ranges without any elasticsearch indices present. Fix
  that behavior in the Rails version of the web-app by handling the lack
  of aggregation results (see the sketch below).
- Other test suite fixes.
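
The guard pattern this commit applies throughout the web-app, sketched roughly below (the helper name and shape are hypothetical; the real code checks `aggregations` inline in each controller, model, and view):

    # Rough sketch of the nil-aggregation guard: when no elasticsearch indices
    # cover the queried date range, the search result carries no aggregations,
    # so readers fall back to an empty default instead of calling [] on nil.
    def bucket_counts(result, name)
      aggregations = result.aggregations
      return {} unless aggregations && aggregations[name]

      aggregations[name]["buckets"].each_with_object({}) do |bucket, counts|
        counts[bucket["key"]] = bucket["doc_count"]
      end
    end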
GUI committed Jun 7, 2018
1 parent c783e1c commit c743e79
Showing 12 changed files with 150 additions and 129 deletions.
@@ -5,7 +5,7 @@
"template": "api-umbrella-logs-v1-*",
"settings": {
"index": {
"number_of_shards": 1,
"number_of_shards": 3,
"codec": "best_compression"
},
"analysis": {
@@ -27,21 +27,7 @@
"_all": {
"enabled": false
},
"date_detection": false,
"numeric_detection": false,
"dynamic_templates": [
{
"string_template": {
"match": "*",
"match_mapping_type": "string",
"mapping": {
"type": "string",
"index": "analyzed",
"analyzer": "keyword_lowercase"
}
}
}
],
"dynamic": false,
"properties": {
"api_backend_id": {
"type": "string",
@@ -5,7 +5,7 @@
"template": "api-umbrella-logs-v1-*",
"settings": {
"index": {
"number_of_shards": 1,
"number_of_shards": 3,
"codec": "best_compression"
},
"analysis": {
@@ -84,7 +84,8 @@
"normalizer": "lowercase_normalizer"
},
"request_ip_city": {
"type": "keyword"
"type": "keyword",
"normalizer": "lowercase_normalizer"
},
"request_ip_country": {
"type": "keyword",
@@ -1,6 +1,12 @@
local cjson = require "cjson"

local path = os.getenv("API_UMBRELLA_SRC_ROOT") .. "/config/elasticsearch_templates_v" .. config["elasticsearch"]["template_version"] .. "_es2x.json"
local path = os.getenv("API_UMBRELLA_SRC_ROOT") .. "/config/elasticsearch_templates_v" .. config["elasticsearch"]["template_version"]
if config["elasticsearch"]["api_version"] >= 5 then
path = path .. "_es5.json"
else
path = path .. "_es1.json"
end

local f, err = io.open(path, "rb")
if err then
ngx.log(ngx.ERR, "failed to open file: ", err)
@@ -146,7 +146,7 @@ def users
@search.aggregate_by_user_stats!(aggregation_options)

@result = @search.result
buckets = @result.aggregations["user_stats"]["buckets"]
buckets = if(@result.aggregations && @result.aggregations["user_stats"]) then @result.aggregations["user_stats"]["buckets"] else [] end
@total = buckets.length

# If we were sorting by one of the facet fields, then the sorting has
@@ -195,7 +195,9 @@ def users

respond_to do |format|
format.json
format.csv
format.csv do
@filename = "api_users_#{Time.now.utc.strftime("%Y-%m-%d")}.csv"
end
end
end
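
The `@filename` assignments added to the csv responders are presumably picked up by the shared CSV rendering code to name the download; a minimal sketch of that kind of pattern (hypothetical, not taken from this commit):

    # Hypothetical sketch: a per-action @filename driving the CSV download name
    # via the Content-Disposition header (not necessarily how this app wires it).
    respond_to do |format|
      format.json
      format.csv do
        @filename = "api_users_#{Time.now.utc.strftime("%Y-%m-%d")}.csv"
        response.headers["Content-Disposition"] = "attachment; filename=\"#{@filename}\""
      end
    end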

@@ -216,7 +218,9 @@ def map

respond_to do |format|
format.json
format.csv
format.csv do
@filename = "api_map_#{Time.now.utc.strftime("%Y-%m-%d")}.csv"
end
end
end

@@ -28,7 +28,9 @@ def drilldown
@result = @search.result

respond_to do |format|
format.csv
format.csv do
@filename = "api_drilldown_#{Time.now.utc.strftime("%Y-%m-%d")}.csv"
end
format.json do
@breadcrumbs = [
:crumb => "All Hosts",
@@ -51,48 +53,50 @@
:rows => [],
}

@result.aggregations["top_path_hits_over_time"]["buckets"].each do |bucket|
@hits_over_time[:cols] << {
:id => bucket["key"],
:label => bucket["key"].split("/", 2).last,
:type => "number",
}
end

has_other_hits = false
@result.aggregations["hits_over_time"]["buckets"].each_with_index do |total_bucket, index|
cells = [
{ :v => total_bucket["key"], :f => formatted_interval_time(total_bucket["key"]) },
]

path_total_hits = 0
@result.aggregations["top_path_hits_over_time"]["buckets"].each do |path_bucket|
bucket = path_bucket["drilldown_over_time"]["buckets"][index]
cells << { :v => bucket["doc_count"], :f => number_with_delimiter(bucket["doc_count"]) }
path_total_hits += bucket["doc_count"]
if @result.aggregations
@result.aggregations["top_path_hits_over_time"]["buckets"].each do |bucket|
@hits_over_time[:cols] << {
:id => bucket["key"],
:label => bucket["key"].split("/", 2).last,
:type => "number",
}
end

other_hits = total_bucket["doc_count"] - path_total_hits
cells << { :v => other_hits, :f => number_with_delimiter(other_hits) }

@hits_over_time[:rows] << {
:c => cells,
}

if(other_hits > 0)
has_other_hits = true
has_other_hits = false
@result.aggregations["hits_over_time"]["buckets"].each_with_index do |total_bucket, index|
cells = [
{ :v => total_bucket["key"], :f => formatted_interval_time(total_bucket["key"]) },
]

path_total_hits = 0
@result.aggregations["top_path_hits_over_time"]["buckets"].each do |path_bucket|
bucket = path_bucket["drilldown_over_time"]["buckets"][index]
cells << { :v => bucket["doc_count"], :f => number_with_delimiter(bucket["doc_count"]) }
path_total_hits += bucket["doc_count"]
end

other_hits = total_bucket["doc_count"] - path_total_hits
cells << { :v => other_hits, :f => number_with_delimiter(other_hits) }

@hits_over_time[:rows] << {
:c => cells,
}

if(other_hits > 0)
has_other_hits = true
end
end
end

if(has_other_hits)
@hits_over_time[:cols] << {
:id => "other",
:label => "Other",
:type => "number",
}
else
@hits_over_time[:rows].each do |row|
row[:c].slice!(-1)
if(has_other_hits)
@hits_over_time[:cols] << {
:id => "other",
:label => "Other",
:type => "number",
}
else
@hits_over_time[:rows].each do |row|
row[:c].slice!(-1)
end
end
end
end
78 changes: 42 additions & 36 deletions src/api-umbrella/web-app/app/models/log_result/base.rb
@@ -19,36 +19,40 @@ def aggregations
end

def hits_over_time
if(!@hits_over_time && aggregations["hits_over_time"])
unless @hits_over_time
@hits_over_time = {}

aggregations["hits_over_time"]["buckets"].each do |bucket|
@hits_over_time[bucket["key"]] = bucket["doc_count"]
if(aggregations && aggregations["hits_over_time"])
aggregations["hits_over_time"]["buckets"].each do |bucket|
@hits_over_time[bucket["key"]] = bucket["doc_count"]
end
end
end

@hits_over_time
end

def drilldown
if(!@drilldown && aggregations["drilldown"])
unless @drilldown
@drilldown = []

aggregations["drilldown"]["buckets"].each do |bucket|
depth, path = bucket["key"].split("/", 2)
terminal = !path.end_with?("/")

depth = depth.to_i
descendent_depth = depth + 1
descendent_prefix = File.join(descendent_depth.to_s, path)

@drilldown << {
:depth => depth,
:path => path,
:terminal => terminal,
:descendent_prefix => descendent_prefix,
:hits => bucket["doc_count"],
}
if(aggregations && aggregations["drilldown"])
aggregations["drilldown"]["buckets"].each do |bucket|
depth, path = bucket["key"].split("/", 2)
terminal = !path.end_with?("/")

depth = depth.to_i
descendent_depth = depth + 1
descendent_prefix = File.join(descendent_depth.to_s, path)

@drilldown << {
:depth => depth,
:path => path,
:terminal => terminal,
:descendent_prefix => descendent_prefix,
:hits => bucket["doc_count"],
}
end
end
end

@@ -86,23 +90,25 @@ def cities
unless @cities
@cities = {}

@regions = aggregations["regions"]["buckets"]
if(@search.query[:aggregations][:regions][:terms][:field] == "request_ip_city")
@city_names = @regions.map { |bucket| bucket["key"] }
@cities = {}

if @city_names.any?
cities = LogCityLocation.where(:country => @search.country)
if @search.state
cities = cities.where(:region => @search.state)
end
cities = cities.where(:city.in => @city_names)

cities.each do |city|
@cities[city.city] = {
"lat" => city.location["coordinates"][1],
"lon" => city.location["coordinates"][0],
}
if(aggregations && aggregations["regions"])
@regions = aggregations["regions"]["buckets"]
if(@search.query[:aggregations][:regions][:terms][:field] == "request_ip_city")
@city_names = @regions.map { |bucket| bucket["key"] }
@cities = {}

if @city_names.any?
cities = LogCityLocation.where(:country => @search.country)
if @search.state
cities = cities.where(:region => @search.state)
end
cities = cities.where(:city.in => @city_names)

cities.each do |city|
@cities[city.city] = {
"lat" => city.location["coordinates"][1],
"lon" => city.location["coordinates"][0],
}
end
end
end
end
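
The memoization change in base.rb is what makes the empty-index case safe: the old `if(!@hits_over_time && aggregations["hits_over_time"])` form raises when `aggregations` is nil and never caches anything when the aggregation is absent, while the new form always memoizes at least an empty default. A compressed, illustrative comparison (method suffixes added only for the side-by-side):

    # Old shape (simplified): raises NoMethodError when aggregations is nil,
    # and leaves @hits_over_time nil whenever the aggregation is missing.
    def hits_over_time_old
      if !@hits_over_time && aggregations["hits_over_time"]
        @hits_over_time = {}
        # ...fill from aggregations["hits_over_time"]["buckets"]...
      end
      @hits_over_time
    end

    # New shape (simplified): always memoizes at least an empty hash, and only
    # reads buckets when the aggregation actually exists.
    def hits_over_time_new
      unless @hits_over_time
        @hits_over_time = {}
        if aggregations && aggregations["hits_over_time"]
          # ...fill from aggregations["hits_over_time"]["buckets"]...
        end
      end
      @hits_over_time
    end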
50 changes: 29 additions & 21 deletions src/api-umbrella/web-app/app/views/admin/stats/map.rabl
@@ -5,32 +5,40 @@ node :region_field do
end

node :regions do
rows = @result.aggregations["regions"]["buckets"].map do |bucket|
{
:id => region_id(bucket["key"]),
:name => region_name(bucket["key"]),
:hits => bucket["doc_count"],
}
end
if(@result.aggregations && @result.aggregations["regions"])
rows = @result.aggregations["regions"]["buckets"].map do |bucket|
{
:id => region_id(bucket["key"]),
:name => region_name(bucket["key"]),
:hits => bucket["doc_count"],
}
end

if(@result.aggregations["missing_regions"]["doc_count"] > 0)
rows << {
:id => "missing",
:name => "Unknown",
:hits => @result.aggregations["missing_regions"]["doc_count"],
}
end
if(@result.aggregations["missing_regions"]["doc_count"] > 0)
rows << {
:id => "missing",
:name => "Unknown",
:hits => @result.aggregations["missing_regions"]["doc_count"],
}
end

rows
rows
else
[]
end
end

node :map_regions do
@result.aggregations["regions"]["buckets"].map do |bucket|
{
:c => region_location_columns(bucket) + [
{ :v => bucket["doc_count"], :f => number_with_delimiter(bucket["doc_count"]) },
]
}
if(@result.aggregations && @result.aggregations["regions"])
@result.aggregations["regions"]["buckets"].map do |bucket|
{
:c => region_location_columns(bucket) + [
{ :v => bucket["doc_count"], :f => number_with_delimiter(bucket["doc_count"]) },
]
}
end
else
[]
end
end

1 change: 1 addition & 0 deletions test/apis/admin/stats/test_logs.rb
@@ -101,6 +101,7 @@ def test_query_builder_case_insensitive_defaults

def test_query_builder_api_key_case_sensitive
FactoryBot.create(:log_item, :request_at => Time.parse("2015-01-16T06:06:28.816Z").utc, :api_key => "AbCDeF", :request_user_agent => unique_test_id)
LogItem.refresh_indices!

response = Typhoeus.get("https://127.0.0.1:9081/admin/stats/logs.json", http_options.deep_merge(admin_session).deep_merge({
:params => {
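
The added `LogItem.refresh_indices!` call reflects elasticsearch's near-real-time model: newly indexed documents only become visible to search after an index refresh, so a test that creates a log item and immediately queries the stats API has to force one. Roughly, mirroring the test above (options abbreviated):

    # Index a log item, force a refresh so it is searchable, then hit the
    # analytics endpoint that aggregates over it.
    FactoryBot.create(:log_item, :request_at => Time.parse("2015-01-16T06:06:28.816Z").utc, :api_key => "AbCDeF")
    LogItem.refresh_indices!
    response = Typhoeus.get("https://127.0.0.1:9081/admin/stats/logs.json", http_options.deep_merge(admin_session))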