Skip to content

Commit

Permalink
Merge pull request #1 from sebastialonso/dev
Browse files Browse the repository at this point in the history
v4.0
  • Loading branch information
sebastialonso authored Aug 6, 2018
2 parents c378e36 + 41c2178 commit 65e9919
Show file tree
Hide file tree
Showing 30 changed files with 962 additions and 252 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Graveyard

[![Hex.pm](https://img.shields.io/badge/hex-0.3.0-blue.svg)](https://hex.pm/packages/graveyard)
[![Hex.pm](https://img.shields.io/badge/hex-0.4.0-blue.svg)](https://hex.pm/packages/graveyard)

A teeny-tiny ORM/library for managing ElasticSearch

Expand All @@ -11,7 +11,7 @@ Just add
```elixir
def deps do
[
{:graveyard, "~> 0.3.0"}
{:graveyard, "~> 0.4.0"}
]
end
```
Expand Down
5 changes: 0 additions & 5 deletions config/config.exs
Original file line number Diff line number Diff line change
Expand Up @@ -30,11 +30,6 @@ use Mix.Config
config :tirexs,
:uri, "http://127.0.0.1:9200"

# config :graveyard,
# index: "graveyard",
# type: "graveyard",
# mappings: CustomMappingsForGraveyard

if Mix.env == :test do
import_config "#{Mix.env}.exs"
end
10 changes: 7 additions & 3 deletions lib/graveyard/exceptions.ex
Original file line number Diff line number Diff line change
@@ -1,13 +1,17 @@
defmodule Graveyard.Errors do
defmodule WrongConfigModuleError do
defmodule ConfigModuleError do
defexception message: "Missing function"

def full_message(error) do
"Supplied module has no get_mappings/2 function"
end
end

defmodule NoElasticSearchInstance do
defexception message: "No ElasticSearch found"
defmodule ElasticSearchInstanceError do
defexception message: "No ElasticSearch instance found"
end

defmodule BadArgumentError do
defexception message: "Illegal arguments were supplied"
end
end
61 changes: 27 additions & 34 deletions lib/graveyard/mappings.ex
Original file line number Diff line number Diff line change
Expand Up @@ -5,33 +5,42 @@ defmodule Graveyard.Mappings do
alias Graveyard.Support
alias Graveyard.Errors
alias Graveyard.Utils
alias Graveyard.Mappings.Basic
alias Graveyard.Mappings.Builder
alias Graveyard.Mappings.Auxiliar
alias Graveyard.Utils.TirexsUris
import Tirexs.Index.Settings

@doc """
Returns the mappings from the module indicated in `config.exs`
Returns the mappings object processed from the configured mappings module or the configured map
"""
def get_mappings(index_name \\ Support.index(), type_name \\ Support.type()) do
is_module_present = is_nil(Support.mappings_module) == false
is_map_present = is_nil(Support.mappings) == false

mappings_from_config = cond do
!is_nil(Support.mappings_module) ->
module = Support.mappings_module
try do
module.get_mappings(index_name, type_name)
rescue
e in UndefinedFunctionError ->
raise Errors.WrongConfigModuleError
end
!is_nil(Support.mappings) ->
[]
!is_module_present and !is_map_present ->
raise Errors.ConfigModuleError, "Only one of :mappings or :mappings_module must be set in config"
is_map_present and Enum.count(Support.mappings) > 1 ->
Builder.get_mappings(index_name, type_name)
is_module_present ->
Basic.get_mappings(index_name, type_name)
true ->
raise Errors.WrongConfigModuleError, "Any of :mappings or :mappings_module must be set in config"
raise Errors.ConfigModuleError, "Only one of :mappings or :mappings_module must be set in config"
end

properties_enhanced = mappings_from_config
|> Keyword.fetch!(:mapping)
|> Keyword.fetch!(:properties)
|> Keyword.merge(timestamps())
|> Keyword.merge(Builder.timestamps())
|> Keyword.merge(add_custom_keywords())

properties_enhanced = if is_map_present do
properties_enhanced
|> Keyword.merge(Auxiliar.build_auxiliar_mappings())
else
properties_enhanced
end

Keyword.take(mappings_from_config, [:index, :type]) ++ [mapping: [properties: properties_enhanced]]
end
Expand Down Expand Up @@ -59,11 +68,15 @@ defmodule Graveyard.Mappings do
|> Tirexs.Mapping.create_resource

case base do
:error -> raise Errors.NoElasticSearchInstance
:error -> raise Errors.ElasticSearchInstanceError
_ -> base
end
end

@doc """
To be used when the mappings have changed. It updates the current mappings with the new one,
maintaining all records within the index.
"""
def apply_mappings_change() do
temporal_index = [source: [index: Support.index(), type: Support.type()], dest: [index: "tmp", type: Support.type()]]
original_index = [source: [index: "tmp", type: Support.type()], dest: [index: Support.index(), type: Support.type()]]
Expand All @@ -87,26 +100,6 @@ defmodule Graveyard.Mappings do
IO.inspect TirexsUris.delete_mapping("tmp")
end

defp timestamps() do
Utils.to_keyword_list(%{
created_at: graveyard_to_elastic(:datetime),
updated_at: graveyard_to_elastic(:datetime)
})
end

defp graveyard_to_elastic(type) do
case type do
:string -> %{type: "keyword"}
:category -> %{type: "keyword"}
:list -> %{type: "keyword"}
:text -> %{type: "text", analyzer: "nGram_analyzer"}
:date -> %{type: "date"}
:datetime -> %{type: "date", format: "dd/MM/yyyy HH:mm:ss"}
:integer -> %{type: "integer"}
:number -> %{type: "float"}
end
end

defp add_custom_keywords() do
[]
end
Expand Down
63 changes: 63 additions & 0 deletions lib/graveyard/mappings/auxiliar.ex
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
defmodule Graveyard.Mappings.Auxiliar do
  @moduledoc """
  Builds the special `:__aux` mapping object used to ease grouping over
  nested (`:oblist`) fields.
  """

  alias Graveyard.Support
  import Graveyard.Utils

  @doc """
  Traverses the configured mappings map looking for `:oblist` entries.

  For every oblist found, its `:category` and `:list` fields are collected
  and exposed as `keyword` properties of a flat object, and the whole set is
  wrapped under a single `:__aux` object mapping. This makes grouping on
  fields that live inside nested documents easier.
  """
  def build_auxiliar_mappings(config \\ Support.mappings) do
    properties =
      config
      |> find_fields_with_schema()
      |> Enum.group_by(& &1.nested_key)
      |> Enum.map(fn {nested_key, fields} ->
        inner = for field <- fields, do: {to_indifferent_atom(field.name), [type: "keyword"]}
        {to_indifferent_atom(nested_key), [properties: inner, type: "object"]}
      end)

    [__aux: [properties: properties, type: "object"]]
  end

  @doc """
  Recursively walks `mmap` and returns `accumulator` extended with one
  `%{nested_key: ..., name: ...}` map per `:category`/`:list` field found
  inside an `:oblist` schema.
  """
  def find_fields_with_schema(mmap, parent_key \\ "", accumulator \\ []) do
    Enum.reduce(mmap, accumulator, fn {key, val}, acc ->
      if is_map(val) and Map.has_key?(val, "schema") do
        path = if parent_key == "", do: key, else: Enum.join([parent_key, key], ".")

        case val["type"] do
          :object ->
            find_fields_with_schema(val["schema"], path, acc)

          :oblist ->
            # NOTE(review): passes the bare `key`, not the dotted `path`, so
            # oblists nested inside objects lose their prefix — confirm intended.
            extract_category_list_keys(key, val["schema"], acc)

          _ ->
            acc
        end
      else
        acc
      end
    end)
  end

  @doc """
  Collects the `:category`/`:list` fields of one oblist `schema` under
  `key`, recursing into any sub-schemas it contains.
  """
  def extract_category_list_keys(key, schema, accumulator) do
    Enum.reduce(schema, accumulator, fn {field_name, value}, acc ->
      cond do
        Map.has_key?(value, "schema") ->
          find_fields_with_schema(%{field_name => value}, key, acc)

        value["type"] in [:category, :list] ->
          acc ++ [%{nested_key: to_indifferent_atom(key), name: to_indifferent_atom(field_name)}]

        true ->
          acc
      end
    end)
  end
end
17 changes: 17 additions & 0 deletions lib/graveyard/mappings/basic.ex
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
defmodule Graveyard.Mappings.Basic do
  @moduledoc """
  Builds a mapping object from a user-supplied module.

  The module configured under `:mappings_module` must export `get_mappings/2`.
  """
  alias Graveyard.Support
  alias Graveyard.Errors

  @doc """
  Delegates to the configured module's `get_mappings/2`.

  Raises `Graveyard.Errors.ConfigModuleError` when the configured module
  does not implement `get_mappings/2`.
  """
  def get_mappings(index_name, type_name) do
    module = Support.mappings_module

    try do
      module.get_mappings(index_name, type_name)
    rescue
      # No binding needed (the original bound an unused `e`, emitting a
      # compiler warning); re-raise with a message naming the bad module
      # instead of the generic "Missing function" default.
      UndefinedFunctionError ->
        raise Errors.ConfigModuleError,
          message: "Supplied module #{inspect(module)} has no get_mappings/2 function"
    end
  end
end
48 changes: 47 additions & 1 deletion lib/graveyard/mappings/builder.ex
Original file line number Diff line number Diff line change
@@ -1,3 +1,49 @@
defmodule Graveyard.Mappings.Builder do
  @moduledoc """
  Builds a mapping object from a user-supplied map schema using the
  Graveyard Mapping DSL.
  """

  alias Graveyard.Support
  alias Graveyard.Utils

  @doc """
  Returns a Tirexs-style mapping keyword list (`:index`, `:type`,
  `:mapping`) built from the configured schema map.
  """
  def get_mappings(index_name, type_name) do
    [
      index: index_name,
      type: type_name,
      mapping: build_recursively(Support.mappings)
    ]
  end

  @doc """
  Mapping entries for the `created_at`/`updated_at` datetime fields that
  Graveyard adds to every record.
  """
  def timestamps() do
    %{
      created_at: graveyard_to_elastic(:datetime),
      updated_at: graveyard_to_elastic(:datetime)
    }
    |> Utils.to_keyword_list()
  end

  # Turns one level of the schema map into `[properties: [...]]`,
  # recursing into any nested "schema" definitions.
  defp build_recursively(config) do
    entries =
      for {key, value} <- config do
        field = Utils.to_indifferent_atom(key)

        if Map.has_key?(value, "schema") do
          elastic = Utils.to_keyword_list(graveyard_to_elastic(value["type"]))
          {field, elastic ++ build_recursively(value["schema"])}
        else
          {field, graveyard_to_elastic(value["type"])}
        end
      end

    [properties: entries]
  end

  # Maps a Graveyard DSL type atom to its ElasticSearch field definition.
  # Unknown types raise CaseClauseError (let it crash).
  defp graveyard_to_elastic(type) do
    case type do
      keyword_type when keyword_type in [:string, :category, :list] -> [type: "keyword"]
      :text -> [type: "text", analyzer: "nGram_analyzer"]
      :date -> [type: "date", format: "dd/MM/yyyy"]
      :datetime -> [type: "date", format: "dd/MM/yyyy HH:mm:ss"]
      :integer -> [type: "integer"]
      :number -> [type: "float"]
      :object -> [type: "object"]
      :oblist -> [type: "nested"]
    end
  end
end
8 changes: 8 additions & 0 deletions lib/graveyard/maquiladoras/find.ex
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
defmodule Graveyard.Maquiladoras.Find do
  @moduledoc """
  Shapes a raw ElasticSearch hit into a plain record map.
  """

  @doc """
  Extracts the `:_source` document from a search hit, copies the hit's
  `:_id` into an `:id` field and strips the internal `:__aux` mapping field.
  """
  def maquilate(result) do
    id = result._id

    result
    |> Map.get(:_source)
    |> Map.delete(:__aux)
    |> Map.put(:id, id)
  end
end
43 changes: 43 additions & 0 deletions lib/graveyard/maquiladoras/group.ex
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
defmodule Graveyard.Maquiladoras.Group do
  @moduledoc """
  Shapes the raw Tirexs aggregation response of a group query into a flat
  list of `%{source: [...], data: %{...}}` maps, one per bucket combination.
  """

  @doc """
  Processes a Tirexs search response tuple.

  Returns a flat list of `%{source: path, data: metrics}` maps on success,
  or `{:error, reason}` when the response is an error tuple.
  """
  def maquilate(results) do
    case results do
      {:ok, 200, %{aggregations: aggs}} ->
        aggs
        |> maquilate_recursively()
        |> List.flatten()

      # `_status` instead of `status`: the original bound it unused,
      # emitting a compiler warning.
      {:error, _status, reason} ->
        {:error, reason}
    end
  end

  # Walks nested bucket aggregations. Each bucket level appends a
  # `%{field_name: ..., value: ...}` entry to the :source path; a node
  # without nested buckets is a leaf holding the metric values.
  defp maquilate_recursively(aggs, parent_bucket \\ %{}) do
    if Map.has_key?(aggs, :aggregation) and Map.has_key?(aggs[:aggregation], :buckets) do
      field_name = aggs[:aggregation][:meta][:field_name]

      Enum.map(aggs[:aggregation][:buckets], fn bucket ->
        entry = %{field_name: field_name, value: bucket[:key]}
        path = Map.get(parent_bucket, :source, []) ++ [entry]
        maquilate_recursively(bucket, Map.put(parent_bucket, :source, path))
      end)
    else
      Map.put(parent_bucket, :data, process_leaf(aggs))
    end
  end

  # Strips ES bucket bookkeeping keys, leaving only metric sub-aggregations.
  defp process_leaf(leaf) do
    leaf
    |> Map.drop([:from, :to, :key, :doc_count, :key_as_string])
    |> clean_leaf_values()
  end

  # Unwraps single-value metric maps (%{value: v} -> v). Non-map values are
  # kept as-is; the original produced `nil` entries for them (an `if` with
  # no `else`), which then crashed `Enum.into/2`.
  defp clean_leaf_values(leaf) do
    Map.new(leaf, fn {key, val} ->
      if is_map(val), do: {key, Map.get(val, :value)}, else: {key, val}
    end)
  end
end
8 changes: 4 additions & 4 deletions lib/graveyard/orm/destroy.ex
Original file line number Diff line number Diff line change
Expand Up @@ -15,21 +15,21 @@ defmodule Graveyard.ORM.Destroy do
%{index: Support.index(), type: Support.type()},
opts
)

# IO.inspect Application.get_all_env(:tirexs)
deleted = Record.find(id)
case delete("#{index}/#{type}/#{id}") do
{:ok, 200, object} ->
deleted
{:error, 404, error} ->
nil
:error ->
IO.inspect "ERROR"
raise Graveyard.Errors.NoElasticSearchInstance
{:error, status, error} ->
IO.inspect(status)
IO.inspect(error)
# Raise some error
{:error, error}
:error ->
IO.inspect "ERROR"
raise Graveyard.Errors.ElasticSearchInstanceError
end
end
end
Expand Down
Loading

0 comments on commit 65e9919

Please sign in to comment.