superstudio 0.8.2102
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE +621 -0
- data/README.md +17 -0
- data/ext/superstudio/extconf.rb +6 -0
- data/ext/superstudio/finalize.h +11 -0
- data/ext/superstudio/fnv_64.c +97 -0
- data/ext/superstudio/fnv_64.h +12 -0
- data/ext/superstudio/hash_linked_list.c +222 -0
- data/ext/superstudio/hash_linked_list.h +38 -0
- data/ext/superstudio/json_builder.c +501 -0
- data/ext/superstudio/json_builder.h +246 -0
- data/ext/superstudio/json_object_array.c +409 -0
- data/ext/superstudio/json_object_array.h +34 -0
- data/ext/superstudio/json_single_object.c +320 -0
- data/ext/superstudio/json_single_object.h +31 -0
- data/ext/superstudio/json_value.c +113 -0
- data/ext/superstudio/json_value.h +19 -0
- data/ext/superstudio/json_value_array.c +157 -0
- data/ext/superstudio/json_value_array.h +23 -0
- data/ext/superstudio/jsonbroker.c +334 -0
- data/ext/superstudio/jsonbroker.h +12 -0
- data/ext/superstudio/ss_alloc.c +27 -0
- data/ext/superstudio/ss_alloc.h +11 -0
- data/ext/superstudio/superstudio.c +10 -0
- data/ext/superstudio/superstudio.h +9 -0
- data/lib/generators/superstudio/schema_generator.rb +65 -0
- data/lib/generators/superstudio/schema_map_generator.rb +60 -0
- data/lib/superstudio.rb +109 -0
- data/lib/superstudio/schema_internal_definer.rb +210 -0
- data/lib/superstudio/schema_interpreter.rb +127 -0
- data/lib/superstudio/schema_reader.rb +51 -0
- data/lib/superstudio/superstudio.so +0 -0
- metadata +76 -0
data/ext/superstudio/ss_alloc.c
ADDED
@@ -0,0 +1,27 @@
+#include "ss_alloc.h"
+
+void *ss_alloc(SSMemoryStack* memory_stack, size_t multiple, size_t size) {
+  SSMemoryStackNode* new_node = calloc(1, sizeof(SSMemoryStackNode));
+  new_node->memory_location = calloc(multiple, size);
+  new_node->previous_node = memory_stack->stack_top;
+
+  memory_stack->stack_top = new_node;
+
+  return memory_stack->stack_top->memory_location;
+}
+
+void collapse_stack(SSMemoryStack* memory_stack)
+{
+  SSMemoryStackNode* temp_previous;
+  while (memory_stack->stack_top->memory_location)
+  {
+    temp_previous = memory_stack->stack_top->previous_node;
+
+    free(memory_stack->stack_top->memory_location);
+    memory_stack->stack_top->memory_location = NULL;
+    free(memory_stack->stack_top);
+    memory_stack->stack_top = NULL;
+
+    memory_stack->stack_top = temp_previous;
+  }
+}
data/ext/superstudio/ss_alloc.h
ADDED
@@ -0,0 +1,11 @@
+#ifndef SS_ALLOC_
+#define SS_ALLOC_
+
+#include <stddef.h>
+#include <stdlib.h>
+#include "json_builder.h"
+
+void *ss_alloc(SSMemoryStack* memory_stack, size_t multiple, size_t size);
+void collapse_stack(SSMemoryStack* memory_stack);
+
+#endif // SS_ALLOC_
data/lib/generators/superstudio/schema_generator.rb
ADDED
@@ -0,0 +1,65 @@
+require 'rails/generators'
+require 'rails/generators/base'
+
+module Superstudio
+  module Generators
+    class SchemaGenerator < Rails::Generators::NamedBase
+      include Superstudio::SchemaReader
+
+      argument :file_arg, type: 'string', required: false
+
+      source_root File.expand_path("../../templates", __FILE__)
+
+      desc "Creates a json schema (draft v4) for all the columns in a database table."
+
+      def create_schema_file
+        model_klass = name.classify.constantize
+        model_columns = {}
+
+        model_klass.columns.each do |column|
+          column_type = column.type
+          column_type = :string if [:datetime].include? column.type
+          column_type = :number if [:decimal].include? column.type
+          model_columns[column.name] = column_type
+        end
+
+        file_data = template_header
+        model_columns.each do |name, type|
+          file_data << data_column(name, type)
+        end
+
+        file_data = file_data.chomp(",")
+        file_data << template_footer
+
+        if file_arg.nil?
+          model_klass_name = model_klass.name.gsub(":", "").underscore
+        else
+          model_klass_name = file_arg
+        end
+
+        create_file "#{schemas_directory}/#{model_klass_name}.json.schema", file_data
+      end
+
+      private
+      def template_header
+        %Q({
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {)
+      end
+
+      def data_column(name, type)
+        %Q(
+    "#{name}": {
+      "type": "#{type}"
+    },)
+      end
+
+      def template_footer
+        %Q(},
+  "required": ["id"]
+})
+      end
+    end
+  end
+end
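A quick illustration of what this generator emits (the example below is not part of the package): for a hypothetical model with id:integer, name:string, and created_at:datetime columns, create_schema_file assembles roughly the following file, mapping datetime to "string", decimal to "number", and requiring only "id". Whitespace is approximate.

  # Hypothetical: approximately the user.json.schema file that
  # `rails generate superstudio:schema user` would write for such a model.
  expected_schema = <<~JSON
    {
      "$schema": "http://json-schema.org/draft-04/schema#",
      "type": "object",
      "properties": {
        "id": { "type": "integer" },
        "name": { "type": "string" },
        "created_at": { "type": "string" }
      },
      "required": ["id"]
    }
  JSON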
data/lib/generators/superstudio/schema_map_generator.rb
ADDED
@@ -0,0 +1,60 @@
+require 'rails/generators'
+require 'rails/generators/base'
+
+module Superstudio
+  module Generators
+    class SchemaMapGenerator < Rails::Generators::NamedBase
+      include Superstudio::SchemaReader
+
+      argument :class_arg, type: 'string', required: false
+
+      source_root File.expand_path("../../templates", __FILE__)
+
+      desc "Creates a simple json schema mapping class, inferring required json nodes and required columns from a json schema (draft v4)."
+
+      def create_map_file
+        file_name = name
+        file_name = file_name << ".json.schema" unless name.end_with?(".json.schema")
+
+        if class_arg.nil?
+          class_name = file_name.chomp(".json.schema").classify
+        else
+          class_name = class_arg.classify
+        end
+
+        interpreted_schema = Superstudio::SqlJsonBuilder.new(nil, file_name)
+
+        file_data = %Q(=begin
+Interpreted schema has the following data bodies. Multiple bodies indicates array nesting - non-included node names should not be included in @json_nodes because they are placeholders for array inserts.
+)
+
+        interpreted_hashes = []
+
+        interpreted_schema.template_bodies.each do |key, template|
+          inter = JSON.parse("{" << template.slice(1..template.length).chomp("}").gsub('{', '"').gsub('}', '"') << "}")
+          temp_string = PP.pp(inter, '')
+          interpreted_hashes << inter
+          file_data << %Q(
+Node Path: #{key.join("->")}
+#{temp_string})
+        end
+        file_data << %Q(=end)
+
+        file_data << %Q(
+class #{class_name}Mapper < Superstudio::SqlJsonBuilder
+  def map_row)
+        interpreted_hashes.each do |hash|
+          hash.each do |name, node|
+            file_data << %Q(
+    @json_nodes[:#{node}] = value_by_column_name("#{name}"))
+          end
+        end
+
+        file_data << %Q(
+  end
+end)
+        create_file "#{schema_maps_directory}/#{class_name.underscore}_mapper.rb", file_data
+      end
+    end
+  end
+end
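The generator above writes a mapper that is just a SqlJsonBuilder subclass whose map_row copies SQL columns into @json_nodes. A minimal sketch of such a generated file follows; the schema name, node tags, and column names are invented for illustration, and the =begin/=end comment block describing the interpreted schema is omitted.

  # Hypothetical output of `rails generate superstudio:schema_map invoice`.
  class InvoiceMapper < Superstudio::SqlJsonBuilder
    def map_row
      @json_nodes[:root_P_id]     = value_by_column_name("id")
      @json_nodes[:root_P_number] = value_by_column_name("number")
    end
  end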
data/lib/superstudio.rb
ADDED
@@ -0,0 +1,109 @@
+require 'superstudio/superstudio'
+require 'superstudio/schema_interpreter'
+require 'superstudio/schema_reader'
+require 'superstudio/schema_internal_definer'
+
+module Superstudio
+  class SqlJsonBuilder
+    include Superstudio::SchemaReader
+    include Superstudio::SchemaInterpreter
+    include Superstudio::SchemaInternalDefiner
+
+    attr_accessor :sql_columns, :json_result, :schema, :template_types, :array_paths, :json_nodes, :required_columns, :human_to_internal, :template_bodies
+
+    def initialize(query, file_name = nil)
+      file_class_name = self.class.name
+      file_class_name.slice!("Mapper")
+      file_name ||= file_class_name.underscore << ".json.schema"
+
+      @schema = read_schema(file_name)
+
+      @sql_columns, @row_being_used = [], []
+      @json_result = ""
+      @json_nodes, @required_columns = {}, {}
+
+      @unique_threes_tags, @human_readable_tags, @internal_use_tags, @quoted_tags, @do_not_hash, @depth_tags, @real_depth_tags, @column_names = [], [], [], [], [], [], [], []
+      @type_2_paths, @type_3_paths, @type_4_paths, @type_5_paths = [], [], [], []
+      @type_2_indicator_names, @type_4_indicator_names = ["root"], []
+      @unique_threes_paths = []
+
+      json_schema_interpretation = interpret_json_schema(@schema)
+
+      if query.present?
+        result_set = get_sql_results(query)
+        set_human_to_internal_mappings(json_schema_interpretation)
+        assemble_json(result_set)
+      else
+        create_template(json_schema_interpretation)
+      end
+    end
+
+    private
+    def get_sql_results(query)
+      query = query.to_sql if query.respond_to?(:to_sql)
+      result_set = ActiveRecord::Base.connection.select_all(query)
+    end
+
+    def value_by_column_name(name)
+      value = @row_being_used[@sql_columns.index(name)]
+      value ||= ""
+      return value
+    end
+
+    def value_by_column_number(number)
+      value = @row_being_used[number]
+      value ||= ""
+      return value
+    end
+
+    def create_internal_row
+      working_row = []
+
+      @human_readable_tags.each_with_index do |value, index|
+        working_row << (@json_nodes[value.to_sym].to_s)
+      end
+      return working_row
+    end
+
+    def assemble_json(result_set)
+      @sql_columns = result_set.columns
+
+
+      broker = Superstudio::JsonBroker.new()
+      broker.set_row_count(result_set.count)
+      broker.set_mapper(@internal_use_tags)
+      broker.set_quotes(@quoted_tags)
+      broker.set_depths(@depth_tags, @real_depth_tags)
+      broker.set_hashing(@do_not_hash)
+      broker.set_column_names(@column_names)
+      broker.set_repeating_arrays(@unique_threes_tags)
+      broker.set_single_node_names(@type_2_indicator_names)
+      broker.set_array_node_names(@type_4_indicator_names)
+
+      # p "@internal_use_tags: #{@internal_use_tags}"
+      # p "@quoted_tags: #{@quoted_tags}"
+      # p "@depth_tags: #{@depth_tags}"
+      # p "@real_depth_tags: #{@real_depth_tags}"
+      # p "@do_not_hash: #{@do_not_hash}"
+      # p "@sql_columns: #{@sql_columns}"
+      # p "@column_names: #{@column_names}"
+      # p "@unique_threes_tags: #{@unique_threes_tags}"
+      # p "@type_2_indicator_names: #{@type_2_indicator_names}"
+      # p "@type_4_indicator_names: #{@type_4_indicator_names}"
+      # p "@type_2_paths: #{@type_2_paths}"
+      # p "@type_3_paths: #{@type_3_paths}"
+      # p "@type_4_paths: #{@type_4_paths}"
+
+      result_set.rows.each do |row|
+        @row_being_used = row
+        @json_nodes = {}
+        map_row
+        working_row = create_internal_row()
+        broker.consume_row(working_row)
+      end
+
+      @json_result = broker.finalize_json
+    end
+
+  end
+end
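As a usage sketch (model and mapper names are hypothetical): passing a query to a generated mapper runs it through ActiveRecord, calls map_row once per row, and exposes the assembled string via json_result; passing nil instead builds the schema template.

  # Hypothetical usage of a generated mapper such as InvoiceMapper above.
  builder = InvoiceMapper.new(Invoice.where(paid: true))   # relation is converted via to_sql
  json    = builder.json_result                            # JSON assembled by the C JsonBroker extension

  # With no query, only the schema is interpreted and a template is created:
  template_only = InvoiceMapper.new(nil, "invoice.json.schema")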
data/lib/superstudio/schema_internal_definer.rb
ADDED
@@ -0,0 +1,210 @@
+module Superstudio
+  module SchemaInternalDefiner
+    def set_human_to_internal_mappings(expected_mappings)
+      fork_nodes = expected_mappings.uniq { |i| i[:path] }
+      max_depth = expected_mappings.max_by { |x| x[:depth] }[:depth]
+      internal_fork_numbers = convert_fork_paths_to_base_route_numbers(fork_nodes, max_depth)
+      depth_counter = 0
+      while depth_counter <= max_depth
+        # Assign all internal numbers to their respective human-readable forms
+        objects_at_depth = expected_mappings.select { |j| j[:depth] == depth_counter }
+        forks_at_depth = internal_fork_numbers.select { |k,v| v[:depth] == depth_counter }
+        type_5_already_found, type_3_already_found = [], []
+        type_1_count, type_3_count, type_5_count = 0, 0, 0
+
+        objects_at_depth.each do |candidate|
+          # ex. {depth: 1, path: ["root"], name: "id", node_type: "integer"}
+
+          # This isn't going to work properly for value arrays
+          if @type_3_paths.include?(candidate[:path])
+            @column_names << candidate[:path].last
+          else
+            @column_names << candidate[:name]
+          end
+          # byebug
+
+          forks_at_depth.each do |p|
+            if candidate[:path] == p[0]
+              # Add the internal path to the internal mapping
+              if @type_5_paths.include?(p[0])
+                type_5_count += 1
+                handle_array_stop_path(type_5_already_found, p[0], p[1][:internal_path], "#{candidate[:path].join("_B_")}_A_#{candidate[:name]}", candidate[:node_type], p[1][:internal_path], "5.#{type_5_count}", candidate[:depth])
+              elsif @type_3_paths.include?(p[0])
+                type_3_count += 1
+
+                handle_array_stop_path(type_3_already_found, p[0], p[1][:internal_path], "#{candidate[:path].join("_B_")}_A_#{candidate[:name]}", candidate[:node_type], p[1][:internal_path], "3.#{type_3_count}", candidate[:depth])
+              else
+                type_1_count += 1
+                add_to_describe_arrays("#{candidate[:path].join("_B_")}_P_#{candidate[:name]}", p[1][:internal_path], "1.#{type_1_count}", candidate[:node_type], candidate[:depth])
+              end
+            end
+          end
+          # Unless there are no forks for this depth, then we're at the root node, and we should do this differently
+          if !forks_at_depth.present?
+            # Check the type, increment, send
+            if @type_5_paths.include?(p[0])
+
+              handle_array_stop_path(type_5_already_found, p[0], p[1][:internal_path], "#{candidate[:path].join("_B_")}_A_#{candidate[:name]}", candidate[:node_type], "", "5.#{type_5_count}", candidate[:depth])
+            elsif @type_3_paths.include?(p[0])
+
+              handle_array_stop_path(type_3_already_found, p[0], p[1][:internal_path], "#{candidate[:path].join("_B_")}_A_#{candidate[:name]}", candidate[:node_type], "", "3.#{type_3_count}", candidate[:depth])
+            else
+              type_1_count += 1
+              add_to_describe_arrays("#{candidate[:path].join("_B_")}_P_#{candidate[:name]}", "", "1.#{type_1_count}", candidate[:node_type], candidate[:depth])
+            end
+          end
+        end
+        depth_counter += 1
+      end
+    end
+
+    def handle_array_stop_path(type_already_found, path, internal_use_tag, human_readable_tag, node_type, parent_path, item_path, depth)
+      if type_already_found.include?(path)
+        # Do nothing.
+      else
+        type_already_found << path
+        @internal_use_tags << internal_use_tag
+        @human_readable_tags << human_readable_tag
+      end
+
+      if @unique_threes_paths.include?(path)
+        @unique_threes_tags << 0
+      else
+        @unique_threes_tags << 1
+      end
+
+      if node_type == "string"
+        @quoted_tags << 1
+      else
+        @quoted_tags << 0
+      end
+
+      @do_not_hash << 1
+
+      if parent_path.present?
+        item_string = "#{parent_path}-#{item_path}"
+        # Count all of the 4s in the internal string, but remove those that are a count of a type
+        # @real_depth_tags << (item_string.scan(/4/).count - item_string.scan(/\.4/).count)
+        @real_depth_tags << (item_string.scan(/4./).count)
+      else
+        @real_depth_tags << 0
+      end
+
+      @depth_tags << (depth - 1) # going to have to fix - appears too deep for type 3s
+    end
+
+    def add_to_describe_arrays(human_route, parent_path, item_path, node_type, depth)
+      @unique_threes_tags << 1 # This isn't a type 3
+      @human_readable_tags << human_route
+      @depth_tags << (depth - 1)
+      item_string = item_path
+
+      if node_type == "string"
+        @quoted_tags << 1
+      else
+        @quoted_tags << 0
+      end
+
+      if parent_path.present?
+        item_string = "#{parent_path}-#{item_path}"
+        @internal_use_tags << item_string
+        # Count all of the 4s in the internal string, but remove those that are a count of a type
+        # @real_depth_tags << (item_string.scan(/4/).count - item_string.scan(/\.4/).count)
+        @real_depth_tags << (item_string.scan(/4./).count)
+      else
+        @internal_use_tags << item_string
+        @real_depth_tags << 0
+      end
+
+      if item_string.chr == '3'
+        @do_not_hash << 1
+      else
+        @do_not_hash << 0
+      end
+    end
+
+    def convert_fork_paths_to_base_route_numbers(fork_nodes, max_depth)
+      depth_counter = 0
+      internal_fork_numbers = {}
+      two_at_depth_counter, three_at_depth_counter, four_at_depth_counter, five_at_depth_counter = 0, 0, 0, 0
+
+      while depth_counter <= max_depth
+        # two_at_depth_counter, three_at_depth_counter, four_at_depth_counter, five_at_depth_counter = 0, 0, 0, 0
+        objects_at_depth = fork_nodes.select {|j| j[:depth] == depth_counter}
+        possible_parents = fork_nodes.select {|j| j[:depth] == (depth_counter - 1)}
+
+        objects_at_depth.each do |o|
+          if o[:path].join("_B_") == 'root'
+            internal_fork_numbers[o[:path]] = { internal_path: '', depth: o[:depth] }
+            break
+          end
+
+          if @type_2_paths.include?(o[:path])
+            two_at_depth_counter += 1
+            generate_internal_fork_number_of_type(internal_fork_numbers, possible_parents, o, 2, two_at_depth_counter)
+          end
+
+          if @type_4_paths.include?(o[:path])
+            four_at_depth_counter += 1
+            generate_internal_fork_number_of_type(internal_fork_numbers, possible_parents, o, 4, four_at_depth_counter)
+          end
+
+          if @type_3_paths.include?(o[:path])
+            three_at_depth_counter += 1
+            # We also need to check some extra possible parents here. It appears that when a type 3 is first, or possibly
+            # the only child in an object, the object will fail to be accounted for.
+
+            ob = o.dup
+            ob[:path] = ob[:path][0..-2]
+
+            if internal_fork_numbers[ob[:path]].nil?
+
+              @type_4_paths.each_with_index do |four_path, idx|
+                if four_path.length + 1 == o[:depth]
+                  possible_parents << { depth: o[:depth] - 1, path: four_path, name: four_path.last, node_type: "array" }
+                  four_at_depth_counter += 1
+                  generate_internal_fork_number_of_type(internal_fork_numbers, possible_parents, ob, 4, four_at_depth_counter)
+                end
+              end
+            end
+
+            generate_internal_fork_number_of_type(internal_fork_numbers, possible_parents, o, 3, three_at_depth_counter)
+          end
+
+
+          if @type_5_paths.include?(o[:path])
+            five_at_depth_counter += 1
+            generate_internal_fork_number_of_type(internal_fork_numbers, possible_parents, o, 5, five_at_depth_counter)
+          end
+        end
+
+        depth_counter += 1
+      end
+      return internal_fork_numbers
+    end
+
+    def generate_internal_fork_number_of_type(internal_fork_numbers, possible_parents, object_at_depth, type, number)
+      parent = nil
+      if possible_parents
+        possible_parents.each do |pp|
+          parent = pp if object_at_depth[:path][0..-2] == pp[:path]
+        end
+      end
+
+      if (type == 2)
+        @type_2_indicator_names[number] = object_at_depth[:path].last
+      end
+      if (type == 4)
+        @type_4_indicator_names[number - 1] = object_at_depth[:path].last
+      end
+      internal_path = ""
+      internal_path = "#{internal_fork_numbers[parent[:path]][:internal_path]}" if parent.present?
+
+      if parent.present? && parent[:path].join("_B_") != 'root' && internal_fork_numbers[parent[:path]][:internal_path] != ''
+        internal_path << "-"
+      end
+      internal_path << "#{type}.#{number}"
+      internal_fork_numbers[object_at_depth[:path]] = { internal_path: internal_path, depth: object_at_depth[:depth] }
+    end
+  end
+end
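For orientation, the data shapes this module traverses, reconstructed from the inline example comment and the internal path format; the nested path below is invented for illustration.

  # One expected_mappings entry, as in the comment inside set_human_to_internal_mappings:
  candidate = { depth: 1, path: ["root"], name: "id", node_type: "integer" }

  # internal_fork_numbers maps a fork path to a "type.number" route, with nested
  # segments joined by "-" (values here are illustrative only):
  internal_fork_numbers = {
    ["root"]               => { internal_path: "",    depth: 0 },
    ["root", "line_items"] => { internal_path: "4.1", depth: 1 }
  }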