log_weaver 0.0.1

@@ -0,0 +1,16 @@
+ require 'fileutils'
+
+ if ENV['FAILFAST']
+   After do |s|
+     Cucumber.wants_to_quit = true if s.failed?
+   end
+ end
+
+ Given /^no file named "([^"]*)"$/ do |file_name|
+   check_file_presence([file_name], false)
+ end
+
+ Before do
+   `rm -rf tmp`
+ end
+
@@ -0,0 +1,17 @@
+ require 'aruba/cucumber'
+ require 'methadone/cucumber'
+
+ ENV['PATH'] = "#{File.expand_path(File.dirname(__FILE__) + '/../../bin')}#{File::PATH_SEPARATOR}#{ENV['PATH']}"
+ LIB_DIR = File.join(File.expand_path(File.dirname(__FILE__)), '..', '..', 'lib')
+
+ Before do
+   @aruba_timeout_seconds = 3600
+   # Using "announce" causes massive warnings on 1.9.2
+   @puts = true
+   @original_rubylib = ENV['RUBYLIB']
+   ENV['RUBYLIB'] = LIB_DIR + File::PATH_SEPARATOR + ENV['RUBYLIB'].to_s
+ end
+
+ After do
+   ENV['RUBYLIB'] = @original_rubylib
+ end
data/lib/log_weaver.rb ADDED
@@ -0,0 +1,4 @@
+ require 'require_all'
+
+ require_all "#{File.dirname(File.expand_path(__FILE__))}/log_weaver"
+
@@ -0,0 +1,37 @@
+ module LogWeaver
+
+   class CombinedLog
+     attr_accessor :logs
+
+     def initialize(logs)
+       @logs = logs
+       @index = CombinedLog.build_index(@logs)
+     end
+
+     def self.build_index(logs)
+       # need to sort by timestamp, then prefix
+       index = {}
+       logs.each do |log|
+         log.lines.each do |t, l|
+           key = CombinedLogIndexKey.new(log.prefix, t)
+           index[key] = l
+         end
+       end
+       #TODO: sorting at this point may have seriously bad performance for large logs; consider a
+       # data structure that stays sorted as you insert
+       Hash[index.sort]
+     end
+
+     def to_s
+       res = ""
+       @index.each do |key, lines|
+         lines.each do |l|
+           res << "#{key.prefix}#{l}\n"
+         end
+       end
+       res
+     end
+   end
+ end
+
+
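Taken together, ParsedLog (further down) and CombinedLog are meant to be driven roughly like this; a minimal sketch with made-up prefixes and in-memory logs, not code from the gem itself:

require 'stringio'
require 'log_weaver'

# Two small logs whose lines start with "YYYY-MM-DD HH:MM:SS.mmm" timestamps.
app = LogWeaver::ParsedLog.new('app: ', StringIO.new("2000-01-01 00:00:01.000 started\n2000-01-01 00:00:03.000 done"))
db  = LogWeaver::ParsedLog.new('db:  ', StringIO.new("2000-01-01 00:00:02.000 connected"))

# build_index sorts every line by timestamp (then prefix); to_s prepends each
# log's prefix so interleaved lines stay attributable to their source.
puts LogWeaver::CombinedLog.new([app, db]).to_s
# app: 2000-01-01 00:00:01.000 started
# db:  2000-01-01 00:00:02.000 connected
# app: 2000-01-01 00:00:03.000 done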
@@ -0,0 +1,14 @@
+ module LogWeaver
+   CombinedLogIndexKey = Struct.new(:prefix, :timestamp) do
+     include Comparable
+
+     def <=>(other)
+       return timestamp <=> other.timestamp unless timestamp == other.timestamp
+       prefix <=> other.prefix
+     end
+
+     def to_s
+       "#{prefix}#{timestamp}"
+     end
+   end
+ end
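The index key orders by timestamp first and only falls back to the prefix on ties; a small illustration (assumed behaviour, not from the gem's specs):

require 'time'
require 'log_weaver'

t1 = Time.parse('2000-01-01 00:00:01.000')
a  = LogWeaver::CombinedLogIndexKey.new('a: ', t1 + 1)
b  = LogWeaver::CombinedLogIndexKey.new('b: ', t1)

# The earlier timestamp wins regardless of prefix order.
[a, b].sort  # => [b, a]

# On equal timestamps, the prefix decides.
LogWeaver::CombinedLogIndexKey.new('a: ', t1) < LogWeaver::CombinedLogIndexKey.new('b: ', t1)  # => true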
@@ -0,0 +1,14 @@
+
+ class Array
+   def uniq?
+     self.length == self.uniq.length
+   end
+ end
+
+ class Time
+   def to_s
+     self.strftime("%Y-%m-%d %H:%M:%S.%L")
+   end
+ end
+
+
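These core-class patches are relied on elsewhere: Array#uniq? says whether an array is duplicate-free, and the Time#to_s override renders timestamps with millisecond precision in the same format the parser matches. A quick sketch of the expected results:

require 'time'
require 'log_weaver'   # loads the core extensions above

[1, 2, 3].uniq?   # => true
[1, 1, 2].uniq?   # => false

# Time#to_s now round-trips the timestamp format the log parser expects.
Time.parse('2000-01-01 00:00:01.123').to_s   # => "2000-01-01 00:00:01.123"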
@@ -0,0 +1,41 @@
+ require 'time'
+
+ module LogWeaver
+   class ParsedLog
+     attr_accessor :lines #TODO: rename; attr_reader should suffice
+     attr_reader :prefix
+
+     def initialize(prefix, log)
+       @prefix = prefix
+       @lines = ParsedLog.parse_log(log)
+     end
+
+     #private TODO: see http://stackoverflow.com/questions/4952980/creating-private-class-method; test per
+     # http://kailuowang.blogspot.ca/2010/08/testing-private-methods-in-rspec.html
+     def self.parse_log(log)
+       res = {}
+       previous_key = nil
+       log.string.split("\n").each do |line|
+         (timestamp, message) = extract_time_stamp(line)
+         if timestamp
+           key = timestamp
+           res[key] = [] unless key == previous_key
+           previous_key = key
+         else
+           raise ArgumentError, "Log does not begin with a timestamp." if previous_key.nil?
+         end
+
+         res[previous_key] << line #message
+       end
+       res
+     end
+
+     def self.extract_time_stamp(line)
+       timestamp = line[/^[0-9]{4}-[01][0-9]-[0-3][0-9] [0-2][0-9](:[0-5][0-9]){2}\.[0-9]{3}/, 0]
+       message = line.sub(/^#{timestamp}/, '')
+       timestamp = Time.parse(timestamp) unless timestamp.nil?
+       [timestamp, message]
+     end
+
+   end
+ end
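ParsedLog groups consecutive lines by timestamp: a line with a leading "YYYY-MM-DD HH:MM:SS.mmm" stamp opens a new bucket, stamp-less continuation lines are appended to the previous bucket, and a log whose first line has no stamp raises ArgumentError. A sketch of the expected shape (the hash layout in the comments is inferred from parse_log above):

require 'stringio'
require 'log_weaver'

log = LogWeaver::ParsedLog.new('app: ', StringIO.new(<<-EOS))
2000-01-01 00:00:01.000 first line
continuation without a timestamp
2000-01-01 00:00:02.000 second line
EOS

# lines is a Hash of Time => [raw lines], one entry per distinct timestamp:
#   t1 => ["2000-01-01 00:00:01.000 first line", "continuation without a timestamp"],
#   t2 => ["2000-01-01 00:00:02.000 second line"]
log.lines.keys.map(&:to_s)
# => ["2000-01-01 00:00:01.000", "2000-01-01 00:00:02.000"]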
@@ -0,0 +1,11 @@
+ module LogWeaver
+   ParsedLogKey = Struct.new(:prefix, :timestamp, :count) do
+     include Comparable
+
+     def <=>(other)
+       return timestamp <=> other.timestamp unless timestamp == other.timestamp
+       return count <=> other.count unless count == other.count
+       prefix <=> other.prefix
+     end
+   end
+ end
@@ -0,0 +1,98 @@
+ require 'methadone'
+
+ include Methadone::Main
+
+ module LogWeaver
+   module PrefixGenerator
+
+     # given an array of file paths, generate file name prefixes according to the following rules:
+     # 1. prefixes have to differ
+     # 2. prefixes have to be at least as long as min_length, unless the file name is shorter
+     # 3. if file names match and are shorter than min_length, grab whole directories from the directory path until they don't match
+     # results are returned as a hash keyed on the passed-in file names
+     def get_file_prefixes(file_paths, min_length = 4)
+       # pseudocode:
+       #   sort by base_name length
+       #   get common prefix of base_names
+       #   append letters to prefix from file name at least until min_length and all unique
+       #   prepend directories until all unique
+
+       base_names = []
+       expanded_paths = []
+       processed_file_paths = {}
+       max_base_name_length = 0
+       max_path_component_length = 0
+
+       file_paths.each do |fp|
+         base_name = File.basename fp
+         max_base_name_length = base_name.length if base_name.length > max_base_name_length
+         base_names << base_name
+         processed_file_paths[fp] = {}
+         processed_file_paths[fp][:base_name] = base_name
+         processed_file_paths[fp][:expanded_path] = File.expand_path(fp)
+         expanded_paths << processed_file_paths[fp][:expanded_path]
+         path_dirs = processed_file_paths[fp][:expanded_path].split('/')
+         path_dirs.pop
+         processed_file_paths[fp][:path_dirs] = path_dirs
+         max_path_component_length = processed_file_paths[fp][:path_dirs].length if processed_file_paths[fp][:path_dirs].length > max_path_component_length
+       end
+
+       raise ArgumentError, "File list is not unique." unless expanded_paths.uniq?
+
+       # initialize accumulator data structures with the common prefix
+       prefix = get_longest_common_prefix base_names
+       prefixes = []
+       file_paths.each do |fp|
+         processed_file_paths[fp][:prefix] = prefix.dup
+         prefixes << processed_file_paths[fp][:prefix]
+       end
+
+       # append as many remaining characters from the file basename as it takes to get
+       # over min_length and make each prefix unique
+       (prefix.length .. max_base_name_length - 1).each do |i|
+         file_paths.each do |fp|
+           # append an additional letter; note: if it is nil, to_s will convert it to ""
+           processed_file_paths[fp][:prefix] << processed_file_paths[fp][:base_name][i].to_s
+         end
+         if i + 1 >= min_length
+           break if prefixes.uniq?
+         end
+       end
+
+       # prepend dir path components if still not unique
+       (max_path_component_length - 1).downto(0) do |i|
+         break if prefixes.uniq?
+         file_paths.each do |fp|
+           processed_file_paths[fp][:prefix].insert(0, processed_file_paths[fp][:path_dirs][i].to_s + "/")
+         end
+       end
+
+       # pick out the results
+       res = {}
+       longest_prefix_length = 0
+       file_paths.each do |fp|
+         res[fp] = processed_file_paths[fp][:prefix]
+         longest_prefix_length = res[fp].length if res[fp].length > longest_prefix_length
+       end
+
+       file_paths.each do |fp|
+         orig_prefix_length = res[fp].length
+         res[fp] << ": " << " " * (longest_prefix_length - orig_prefix_length)
+       end
+
+       res
+     end
+
+     def get_longest_common_prefix(words)
+       words = words.dup
+       return nil if words.include? nil
+       prefix = words.shift.dup
+       until prefix == ""
+         break if words.all? { |w| w.start_with?(prefix) }
+         prefix.chop!
+       end
+       prefix
+     end
+
+   end
+ end
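A sketch of how get_file_prefixes is intended to behave, using illustrative paths; the prefixes shown in the comments follow from the rules above rather than from the gem's own tests:

require 'log_weaver'

include LogWeaver::PrefixGenerator

# Distinct basenames: the common prefix is empty, so characters are appended
# until the prefixes are unique and at least min_length (4) long, then ": "
# and padding are added so all prefixes line up.
get_file_prefixes(['logs/app.log', 'logs/db.log'])
# => { "logs/app.log" => "app.: ", "logs/db.log" => "db.l: " }

# Identical basenames: directory components are prepended until unique.
get_file_prefixes(['host1/app.log', 'host2/app.log'])
# => { "host1/app.log" => "host1/app.log: ", "host2/app.log" => "host2/app.log: " }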
@@ -0,0 +1,3 @@
+ module LogWeaver
+   VERSION = "0.0.1"
+ end
@@ -0,0 +1,40 @@
+ # -*- encoding: utf-8 -*-
+ require File.expand_path('../lib/log_weaver/version', __FILE__)
+
+ Gem::Specification.new do |gem|
+   gem.add_development_dependency('readme', '~> 0.1')
+   require 'readme'
+   r = Readme.file
+   gem.authors = r.authors
+   gem.email = ["raphael.borowiecki@gmail.com"]
+   gem.description = r.description
+   gem.summary = "Weaves log files by timestamp."
+   gem.homepage = r.homepage
+
+   if File.directory?('.hg')
+     gem.files = `hg manifest`.split($\)
+   elsif File.directory?('.git')
+     gem.files = `git ls-files`.split($\)
+   else
+     raise "Need to build from a git or hg repo to generate manifest."
+   end
+
+   gem.executables = gem.files.grep(%r{^bin/}).map { |f| File.basename(f) }
+   gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
+   gem.name = "log_weaver"
+   gem.require_paths = ["lib"]
+   gem.version = LogWeaver::VERSION
+
+   gem.add_development_dependency('yard', '~> 0.8')
+   gem.add_development_dependency('unindent', '~> 1.0')
+   gem.add_development_dependency('aruba', '~> 0.5')
+   gem.add_development_dependency('cucumber', '~> 1.2')
+   gem.add_development_dependency('rspec', '~> 2.13')
+   gem.add_development_dependency('factory_girl', '~> 4.2')
+   gem.add_development_dependency('rake', '~> 0.9')
+   gem.add_dependency('methadone', '~> 1.2')
+   gem.add_dependency('require_all', '~> 1.2')
+ end
+
+
+
@@ -0,0 +1,19 @@
+ require 'spec_helper'
+
+ module LogWeaver
+   class CombinedLogIndexKey
+     describe "#<=>" do
+       it "should compare timestamp first" do
+         ($k_p1_t2 <=> $k_p2_t1).should == ($k_p1_t2.timestamp <=> $k_p2_t1.timestamp)
+       end
+       it "should compare prefix second" do
+         ($k_p1_t1 <=> $k_p2_t1).should == ($k_p1_t1.prefix <=> $k_p2_t1.prefix)
+       end
+     end
+     describe "#to_s" do
+       it "prints 'prefix: timestamp'" do
+         $k_p1_t1.to_s.should == "#{$p1}#{$t1}"
+       end
+     end
+   end
+ end
@@ -0,0 +1,95 @@
+ require 'spec_helper'
+ require 'unindent'
+
+ module LogWeaver
+
+   def index(logs)
+     CombinedLog.new(logs).instance_variable_get(:@index)
+   end
+
+
+   describe CombinedLog do
+ =begin
+     before(:all) do
+     end
+
+
+     describe ".initialize" do
+       it "stores the logs" do
+         @cl.logs.should == @logs
+       end
+     end
+ =end
+
+     describe ".build_index" do
+       # note: it's much briefer/easier/more readable to give symbolic descriptions
+       # for the examples; pn is the prefix, tn is the timestamp, ln are the line contents
+ =begin
+       it "handles [p1_t1_l1, p2_t2_l1] (2 logs, ordered timestamps, 1 line each)" do
+         CombinedLog.build_index([$pl_p1_t1_l1, $pl_p2_t2_l1]).to_a.should == $hash_p1_t1_l1_and_p2_t2_l1.to_a
+       end
+       it "handles [p1_t1_l1, p2_t1_l1] (2 logs, same timestamp, 1 line each)" do
+         CombinedLog.build_index([$pl_p1_t1_l1, $pl_p2_t1_l1]).to_a.should == $hash_p1_t1_l1_and_p2_t1_l1.to_a
+       end
+       it "handles [p1_t2_l1, p2_t1_l1] (2 logs, p2 timestamp comes before p1)" do
+         CombinedLog.build_index([$pl_p1_t2_l1, $pl_p2_t1_l1]).to_a.should == $hash_p1_t2_l1_and_p2_t1_l1.to_a
+       end
+
+
+       sum = @parsed_log_p1_t1l1_t2l1 + @parsed_log_p1_t1l2_t2l2
+       sum = @parsed_log_p1_t2l1 + @parsed_log_p2_t1l1
+       sum = @parsed_log_p1_t1l1_t2l1 + @parsed_log_p2_t1l1_t2l1
+       it "handles [p1_t2_l1, p2_t1_l1] (2 logs, p2 timestamp comes before p1)" do
+         cl = CombinedLog.build_index([$pl_p1_t2_l1, $pl_p2_t1_l1])
+         cl.should == $hash_p1_t2_l1_and_p2_t1_l1
+         CombinedLog.build_index([$pl_p1_t2_l1, $pl_p2_t1_l1]).to_a.should == $hash_p1_t2_l1_and_p2_t1_l1.to_a
+       end
+ =end
+     end
+
+     describe "#to_s" do
+       it "prints p1_t1_l1, p2_t2_l1" do
+         output = <<-eos
+           #{$out_p1_t1_l1}
+           #{$out_p2_t2_l1}
+         eos
+         CombinedLog.new([$pl_p1_t1_l1, $pl_p2_t2_l1]).to_s.should == output.unindent
+       end
+       it "prints p1_t1_l1, p2_t1_l1 (same timestamp across files)" do
+         output = <<-eos
+           #{$out_p1_t1_l1}
+           #{$out_p2_t1_l1}
+         eos
+         CombinedLog.new([$pl_p1_t1_l1, $pl_p2_t1_l1]).to_s.should == output.unindent
+       end
+       it "prints p1_t1_l1_t1_l2, p2_t1_l3_t1_l4 (same timestamp across files, more lines)" do
+         output = <<-eos
+           #{$out_p1_t1_l1}
+           #{$out_p1_t1_l2}
+           #{$out_p2_t1_l3}
+           #{$out_p2_t1_l4}
+         eos
+         CombinedLog.new([$pl_p1_t1_l1_t1_l2, $pl_p2_t1_l3_t1_l4]).to_s.should == output.unindent
+       end
+       it "prints p1_t1_l1_t2_l1, p2_t3_l1" do
+         output = <<-eos
+           #{$out_p1_t1_l1}
+           #{$out_p1_t2_l1}
+           #{$out_p2_t3_l1}
+         eos
+         CombinedLog.new([$pl_p1_t1_l1_t2_l1, $pl_p2_t3_l1]).to_s.should == output.unindent
+       end
+ =begin
+       # from parsed_log_spec
+       it "prepends the prefix to every line with a timestamp" do
+         ParsedLog.parse_log(@t1l1_t2l1_log, @p1).should == @hash_with_one_line_per_timestamp
+       end
+       it "does not prepend the prefix to lines with no time stamp" do
+         ParsedLog.parse_log(@log_with_missing_timestamps, @p1).should == @hash_with_more_than_one_line_per_timestamp
+       end
+ =end
+     end
+   end
+ end
+
+
@@ -0,0 +1,177 @@
+ require 'time'
+ require 'log_weaver'
+ require 'stringio'
+ require 'factory_girl'
+ # Vars common to specs and factories; I tried wrapping them in a module, i.e.:
+ #   module CommonVariables
+ #     def p1() 'p1' end
+ #   end
+ # but giving modules deep inside RSpec access to it by extending was way more voodoo
+ # than I can stomach for now. So we'll go with globals.
+
+ $p1 = 'p1:'
+ $p2 = 'p2:'
+
+ $t1 = Time.parse('2000-01-01 00:00:01.000') # NOTE: init time this way to discard values below msec
+ $t2 = $t1 + 1
+ $t3 = $t2 + 1
+
+ $l1 = ' l1'
+ $l2 = ' l2'
+ $l3 = ' l3'
+ $l4 = ' l4'
+
+ $t1_l1 = "#{$t1}#{$l1}"
+ $t1_l2 = "#{$t1}#{$l2}"
+ $t1_l3 = "#{$t1}#{$l3}"
+ $t1_l4 = "#{$t1}#{$l4}"
+ $t2_l1 = "#{$t2}#{$l1}"
+ $t2_l2 = "#{$t2}#{$l2}"
+ $t3_l1 = "#{$t3}#{$l1}"
+
+ $no_t_l1 = $l1
+ $no_t_l2 = $l2
+
+ $out_p1_t1_l1 = "#{$p1}#{$t1_l1}"
+ $out_p1_t1_l2 = "#{$p1}#{$t1_l2}"
+ $out_p1_t2_l1 = "#{$p1}#{$t2_l1}"
+ $out_p2_t1_l1 = "#{$p2}#{$t1_l1}"
+ $out_p2_t1_l3 = "#{$p2}#{$t1_l3}"
+ $out_p2_t1_l4 = "#{$p2}#{$t1_l4}"
+ $out_p2_t2_l1 = "#{$p2}#{$t2_l1}"
+ $out_p2_t3_l1 = "#{$p2}#{$t3_l1}"
+
+ $io_empty = StringIO.new
+ $io_t1_l1 = StringIO.new($t1_l1)
+ $io_t2_l1 = StringIO.new($t2_l1)
+ $io_t1_l1_t2_l1 = StringIO.new([$t1_l1, $t2_l1].join("\n"))
+ $io_t1_l1_t1_l2 = StringIO.new([$t1_l1, $t1_l2].join("\n"))
+ $io_no_t_l1_no_t_l2 = StringIO.new([$no_t_l1, $no_t_l2].join("\n"))
+ $io_no_t_l1_t1_l2 = StringIO.new([$no_t_l1, $t1_l2].join("\n"))
+ $io_t1_l1_no_t_l2 = StringIO.new([$t1_l1, $no_t_l2].join("\n"))
+
+ $io_t1l2_t2l2 = StringIO.new([$t1_l2, $t2_l2].join("\n"))
+ $io_with_missing_timestamps = StringIO.new([$t1_l1, $no_t_l1, $t2_l1].join("\n"))
+ $io_with_duplicate_timestamp = StringIO.new([$t1_l1, $t1_l1].join("\n"))
+ $io_starting_with_no_timestamp = StringIO.new([$no_t_l1, $t2_l1].join("\n"))
+
+ $hash_t1_l1 = { $t1 => [$t1_l1] }
+ $hash_t1_l1_t2_l1 = {
+   $t1 => [$t1_l1],
+   $t2 => [$t2_l1]
+ }
+ $hash_t1_l3_t1_l4 = {
+   $t1 => [$t1_l3, $t1_l4]
+ }
+ $hash_t2_l1 = { $t2 => [$t2_l1] }
+ $hash_t3_l1 = { $t3 => [$t3_l1] }
+ $hash_t1_l1_t1_l2 = {
+   $t1 => [$t1_l1, $t1_l2]
+ }
+ $hash_t1_l1_no_t_l2 = {
+   $t1 => [$t1_l1, $no_t_l2]
+ }
+
+ # need to monkey-patch in argumentless constructors for FactoryGirl to be happy;
+ # see http://stackoverflow.com/a/6838145/26819
+ module LogWeaver
+   class ParsedLog
+     attr_accessor :prefix
+     def initialize
+     end
+   end
+ end
+
+ FactoryGirl.define do
+   #TODO: can this be LogWeaver::ParsedLog instead of a string?
+   factory 'log_weaver/parsed_log' do
+     factory :pl_p1 do
+       prefix {$p1} #TODO: are the curlies needed here?
+       factory :pl_p1_t1_l1 do
+         lines $hash_t1_l1
+       end
+       factory :pl_p1_t1_l1_t1_l2 do
+         lines $hash_t1_l1_t1_l2
+       end
+       factory :pl_p1_t1_l1_t2_l1 do
+         lines $hash_t1_l1_t2_l1
+       end
+       factory :pl_p1_t2_l1 do
+         lines $hash_t2_l1
+       end
+     end
+
+     factory :pl_p2 do
+       prefix $p2
+       factory :pl_p2_t1_l1 do
+         lines $hash_t1_l1
+       end
+       factory :pl_p2_t1_l3_t1_l4 do
+         lines $hash_t1_l3_t1_l4
+       end
+       factory :pl_p2_t2_l1 do
+         lines $hash_t2_l1
+       end
+       factory :pl_p2_t3_l1 do
+         lines $hash_t3_l1
+       end
+     end
+   end
+
+   $pl_p1_t1_l1 = FactoryGirl.build :pl_p1_t1_l1
+   $pl_p1_t1_l1_t1_l2 = FactoryGirl.build :pl_p1_t1_l1_t1_l2
+   $pl_p1_t1_l1_t2_l1 = FactoryGirl.build :pl_p1_t1_l1_t2_l1
+   $pl_p1_t2_l1 = FactoryGirl.build :pl_p1_t2_l1
+   $pl_p2_t1_l1 = FactoryGirl.build :pl_p2_t1_l1
+   $pl_p2_t1_l3_t1_l4 = FactoryGirl.build :pl_p2_t1_l3_t1_l4
+   $pl_p2_t2_l1 = FactoryGirl.build :pl_p2_t2_l1
+   $pl_p2_t3_l1 = FactoryGirl.build :pl_p2_t3_l1
+
+
+   factory 'log_weaver/combined_log_index_key' do
+     factory :k_p1 do
+       prefix $p1
+       factory :k_p1_t1 do
+         timestamp $t1
+       end
+       factory :k_p1_t2 do
+         timestamp $t2
+       end
+     end
+     factory :k_p2 do
+       prefix $p2
+       factory :k_p2_t1 do
+         timestamp $t1
+       end
+       factory :k_p2_t2 do
+         timestamp $t2
+       end
+     end
+   end
+ end
+
+ $k_p1_t1 = FactoryGirl.build :k_p1_t1
+ $k_p1_t2 = FactoryGirl.build :k_p1_t2
+ $k_p2_t1 = FactoryGirl.build :k_p2_t1
+ $k_p2_t2 = FactoryGirl.build :k_p2_t2
+
+ $hash_p1_t1_l1_and_p2_t2_l1 = {
+   $k_p1_t1 => [$l1],
+   $k_p2_t2 => [$l1]
+ }
+
+ $hash_p1_t1_l1_and_p2_t1_l1 = {
+   $k_p1_t1 => [$l1],
+   $k_p2_t1 => [$l1]
+ }
+
+ $hash_p1_t2_l1_and_p2_t1_l1 = {
+   $k_p2_t1 => [$l1],
+   $k_p1_t2 => [$l1]
+ }
+
+
+
+
+
+