symian 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,107 @@
1
require 'forwardable'
require 'yaml'
2
+
3
+ module Symian
4
+ class TraceCollector
5
+ extend Forwardable
6
+
7
+ # attributes to store
8
+ ATTRIBUTES = [ :incidents, :events, :support_groups ]
9
+
10
+ def initialize(backend, opts={})
11
+ @backend = case backend
12
+ when :memory
13
+ MemoryBackend.new
14
+ when :yaml
15
+ raise ArgumentError, 'File not specified' unless opts[:file]
16
+ YAMLBackend.new(opts[:file])
17
+ # when :marshal
18
+ # MarshalBackend.new
19
+ # when :json
20
+ # JsonBackend.new
21
+ else
22
+ raise ArgumentError, 'Unsupported backend!'
23
+ end
24
+ end
25
+
26
+ # methods to dynamically generate
27
+ METHODS = [ :save_and_close, ATTRIBUTES.collect{ |attr| [ "#{attr}", "record_#{attr}", "with_#{attr}" ] } ].flatten!
28
+
29
+ # delegate methods to @backend
30
+ def_delegators :@backend, *METHODS
31
+
32
+ end
33
+
34
+
35
# Volatile, in-memory trace storage. For each traced attribute it keeps
# one array and generates three accessors: record_<attr> (append),
# <attr> (count of recorded elements), and with_<attr> (iteration).
class MemoryBackend

  def initialize
    TraceCollector::ATTRIBUTES.each do |attr|
      instance_variable_set("@#{attr}_storage", [])
    end
  end

  TraceCollector::ATTRIBUTES.each do |attr|
    class_eval <<-EOS
      # Records a single element, or concatenates an array of elements.
      # Returns self so calls can be chained.
      def record_#{attr}(elem)
        if Array === elem
          @#{attr}_storage += elem
        else
          @#{attr}_storage << elem
        end
        self
      end

      # Returns the number of recorded elements.
      def #{attr}
        @#{attr}_storage.size
      end

      # Yields each recorded element, or returns an Enumerator when no
      # block is given. The Enumerator.new(object) form used previously
      # was deprecated in Ruby 1.9 and removed in Ruby 3; to_enum is the
      # supported equivalent.
      def with_#{attr}
        if block_given?
          @#{attr}_storage.each do |el|
            yield el
          end
        else
          @#{attr}_storage.to_enum(:each)
        end
      end
    EOS
  end

  # No-op: memory-backed traces are not persisted. Kept (instead of
  # raising) for interface compatibility with the other backends.
  def save_and_close
    # raise NotImplementedError, 'A trace with memory backend cannot be saved!'
  end

end
75
+
76
+
77
# YAML-file backed trace storage. On initialization it reloads any trace
# previously saved to the file; save_and_close serializes all storages
# back to it.
class YAMLBackend < MemoryBackend

  def initialize(filename)
    @filename = filename
    # if file exists and is non-empty, try to read its contents
    size = File.size?(@filename)
    if !size.nil? and size > 0
      hash = File.open(@filename) do |file|
        # NOTE(review): YAML.load is safe-loading on Psych 4 (Ruby 3.1+)
        # and may reject non-basic serialized objects — verify against
        # the object types actually recorded in traces.
        YAML.load(file)
      end
      TraceCollector::ATTRIBUTES.map(&:to_s).each do |attr|
        # default to an empty storage when the file lacks this key;
        # a nil storage would crash record_<attr> later on
        instance_variable_set("@#{attr}_storage", hash[attr] || [])
      end
    else
      super()
    end
  end

  # Dumps every attribute storage to the YAML file, keyed by attribute
  # name (as a String).
  def save_and_close
    hash = {}
    TraceCollector::ATTRIBUTES.map(&:to_s).each do |attr|
      hash[attr] = instance_variable_get("@#{attr}_storage")
    end
    File.open(@filename, 'w') do |file|
      YAML.dump(hash, file)
    end
  end

end
106
+
107
+ end
@@ -0,0 +1,168 @@
1
+ require 'csv'
2
+ require 'stringio'
3
+
4
+ require 'erv'
5
+
6
+
7
+ module Symian
8
# Models incident escalation between support groups as a stochastic
# transition matrix parsed from a CSV escalation matrix. Rows are source
# groups ("In" plus the real groups); columns are destination groups
# (the real groups plus "Out").
class TransitionMatrix

  # this is mostly for testing purposes
  attr_reader :transition_probabilities

  # Builds the matrix from +input+, which may be a filename, a CSV
  # string, or any IO-like object responding to read.
  #
  # Raises RuntimeError on malformed matrices and ArgumentError on
  # non-integer escalation counts.
  def initialize(input)
    # allow filename, string, and IO objects as input
    if input.kind_of?(String)
      # File.exists? was removed in Ruby 3.2; File.exist? is the
      # supported predicate. Read the file up front so no handle leaks.
      if File.exist?(input)
        input = StringIO.new(File.read(input))
      else
        input = StringIO.new(input.strip.split("\n").collect{|l| l.strip }.join("\n"))
      end
    else
      raise RuntimeError unless input.respond_to?(:read)
    end

    @transition_probabilities = {}

    # process escalation matrix
    headers = nil
    CSV.parse(input.read, :headers => :first_row) do |row|
      headers ||= row.headers
      @sg_names ||= headers[1..-2]

      # make sure that support groups do not include the "In" virtual support group
      raise RuntimeError if @sg_names.include?("In")

      # make sure that last support group is the "Out" virtual support group
      raise RuntimeError unless headers[-1] == "Out"

      sg_name = row[0] # the first row element is the support group name

      # make sure support group name is valid
      raise RuntimeError unless sg_name == "In" or @sg_names.include?(sg_name)

      # make sure we are not overwriting existing data
      raise RuntimeError if @transition_probabilities[sg_name]
      @transition_probabilities[sg_name] = []

      # prepare corresponding row in transition matrix
      2.upto(row.length) do |i|
        escalations = Integer(row[i-1]) # raises ArgumentError in case of errors
        if escalations > 0
          @transition_probabilities[sg_name] << { :sg_name => headers[i-1],
                                                  :escalations => escalations }
        end
      end

      # calculate normalized probabilities
      normalize_probabilities(@transition_probabilities[sg_name])
    end

    # check that we have transition probabilities for each support group
    [ "In", *@sg_names ].each do |name|
      raise RuntimeError unless @transition_probabilities.has_key?(name)
    end

    # TODO: make seeding of this thing configurable...
    @rng = ERV::RandomVariable.new(:distribution => :uniform, :min_value => 0.0, :max_value => 1.0)
  end


  # Samples the destination of an escalation originating at support
  # group +from+, according to the (cumulative) transition probabilities.
  #
  # Raises ArgumentError if +from+ is unknown, RuntimeError if no
  # destination can be found.
  def escalation(from)
    # raise error if source support group does not exist
    raise ArgumentError unless tps = @transition_probabilities[from]

    # get random value
    x = @rng.next

    # return name of first support group whose (cumulative)
    # transition probability is larger than x
    tps.each do |el|
      return el[:sg_name] if el[:probability] > x
    end

    # the destination support group was not found
    raise RuntimeError
  end


  # Merges two support groups into a single one (named +new_name+, or an
  # auto-generated name), recomputing escalations both to and from the
  # merged group. Raises RuntimeError if either group does not exist.
  def merge(sg1_name, sg2_name, new_name=nil)
    # raise error if support groups do not exist
    raise RuntimeError unless sg1_probs = @transition_probabilities.delete(sg1_name) and
                              sg2_probs = @transition_probabilities.delete(sg2_name)

    new_sg_name = new_name || "Merge_of_%s_and_%s" % [ sg1_name, sg2_name ]

    # recalculate escalations to new sg
    @transition_probabilities.each do |k,v|

      # add escalation information for new group
      escalations = 0
      v.each do |el|
        if el[:sg_name] == sg1_name or el[:sg_name] == sg2_name
          escalations += el[:escalations]
        end
      end

      # only record a transition when escalations were actually observed;
      # a zero-escalation entry appended last would be forced to
      # probability 1.0 by normalize_probabilities and become reachable
      if escalations > 0
        v << { :sg_name => new_sg_name,
               :escalations => escalations }
      end

      # remove old escalation information
      v.delete_if {|el| el[:sg_name] == sg1_name or el[:sg_name] == sg2_name }

      # recalculate normalized probabilities
      normalize_probabilities(v)
    end

    # update @sg_names — use new_sg_name, not new_name, which is nil
    # when the caller did not provide an explicit name
    @sg_names[@sg_names.index(sg1_name)] = new_sg_name
    @sg_names.delete(sg2_name)

    # recalculate escalations from new sg
    total_escalation_info = sg1_probs + sg2_probs
    @transition_probabilities[new_sg_name] = []
    [ *@sg_names, "Out" ].each do |name|
      escalations = total_escalation_info.inject(0) do |sum,el|
        sum + (el[:sg_name] == name ? el[:escalations] : 0)
      end

      if escalations > 0
        @transition_probabilities[new_sg_name] << { :sg_name => name,
                                                    :escalations => escalations }
      end
    end

    # recalculate normalized probabilities
    normalize_probabilities(@transition_probabilities[new_sg_name])
  end


  # Renders the matrix back to CSV, in the same layout accepted by the
  # constructor (missing transitions are printed as 0).
  def to_s
    lines = [ "From/To,#{@sg_names.join(',')},Out" ]
    [ "In", *@sg_names ].each do |input_sg|
      escalations = [ *@sg_names, "Out" ].map do |output_sg|
        @transition_probabilities[input_sg].map{|x| x[:sg_name] == output_sg ? x[:escalations] : nil }.compact.first || 0
      end
      lines << "#{input_sg},#{escalations.join(',')}"
    end
    lines.join("\n")
  end


  private

  # Annotates each entry of +probability_vector+ with a cumulative
  # :probability value in (0.0, 1.0]. No-op on an empty vector.
  def normalize_probabilities(probability_vector)
    return if probability_vector.empty?

    # calculate total escalations
    total_escalations = probability_vector.inject(0) { |sum,el| sum + el[:escalations] }

    # probability values are cumulative
    cumulative_escalations = 0
    probability_vector.each do |el|
      cumulative_escalations += el[:escalations]
      el[:probability] = cumulative_escalations.to_f / total_escalations.to_f
    end

    # just in case... (guard against floating-point round-off)
    probability_vector[-1][:probability] = 1.0
  end
end
168
+ end
@@ -0,0 +1,3 @@
1
module Symian
  # Current gem version.
  VERSION = '0.1.0'
end
@@ -0,0 +1,158 @@
1
+ module Symian
2
# Represents a daily work shift for support operators, either a
# predefined timezone-based 9-to-5 shift, a custom interval, or a 24x7
# shift. Shift boundaries are stored as integer seconds since midnight
# UTC, so overnight shifts (end before start) are handled explicitly.
#
# NOTE(review): relies on ActiveSupport core extensions
# (Time#seconds_since_midnight, Integer#day) — declared as a gem
# dependency; confirm it is loaded before this class is used.
class WorkShift

  # type   - :predefined (params[:id] keys into WORKSHIFT_TABLE),
  #          :custom (params[:start_time]/:end_time as Time objects),
  #          or :all_day_long (24x7, no params).
  # params - options hash as described above.
  #
  # Raises ArgumentError on unknown types or missing/unknown parameters.
  def initialize(type, params={})
    case type
    when :predefined
      # get workshift id
      raise ArgumentError unless params[:id]
      wsid = params[:id]

      # retrieve predefined workshift
      predefined_workshift = WORKSHIFT_TABLE[wsid]
      raise ArgumentError unless predefined_workshift

      # load start_time and end_time from predefined workshift
      @start_time = predefined_workshift[:start_time]
      @end_time = predefined_workshift[:end_time]
    when :custom
      # load start_time and end_time from parameters
      raise ArgumentError unless params[:start_time] and params[:end_time]
      @start_time = params[:start_time]
      @end_time = params[:end_time]
    when :all_day_long
      # nothing to do
    else
      raise ArgumentError
    end

    # save work shift type
    @type = type

    unless @type == :all_day_long
      # normalize start_time and end_time by transforming them from
      # instances of (Date)Time class to integers representing the
      # number of seconds elapsed from last midnight UTC
      @start_time = @start_time.utc.seconds_since_midnight.round
      @end_time = @end_time.utc.seconds_since_midnight.round
    end

    # check if it is an overnight work shift
    @overnight = (@type == :all_day_long ? false : @end_time < @start_time)
  end


  # Returns true if the shift is active at the given Time. For an
  # overnight shift the active window wraps around midnight.
  def active_at?(time)
    return true if @type == :all_day_long

    t = time.utc.seconds_since_midnight.round
    if @overnight
      t <= @end_time or t >= @start_time
    else
      @start_time <= t and t <= @end_time
    end
  end


  # Returns the integer number of seconds from +time+ to the end of the
  # current shift (Infinity for 24x7 shifts). Must only be called while
  # the shift is active; raises otherwise.
  def secs_to_end_of_shift(time)
    raise 'secs_to_end_of_shift called for unavailable operator' unless active_at?(time)
    return Infinity if @type == :all_day_long

    t = time.utc.seconds_since_midnight.round
    res = if @overnight
      if t <= @end_time
        # past midnight: the shift ends later today
        @end_time - t
      elsif t >= @start_time
        # before midnight: the shift ends tomorrow
        @end_time + 1.day - t
      else
        # TODO: else raise error
        raise 'Weird error in secs_to_end_of_shift!'
      end
    else
      # TODO: raise error if t < @start_time or t > @end_time
      raise 'Weirder error in secs_to_end_of_shift!' if t < @start_time or t > @end_time
      @end_time - t
    end

    # need to convert to integer
    res.round
  end


  # Returns the integer number of seconds from +time+ to the next start
  # of the shift. Must only be called while the shift is inactive;
  # raises otherwise (and is therefore never valid for 24x7 shifts).
  def secs_to_begin_of_shift(time)
    raise 'secs_to_begin_of_shift called for available operator' if active_at?(time)

    t = time.utc.seconds_since_midnight.round
    res = if @overnight
      # TODO: raise error if t < @end_time or t > @start_time
      raise 'Weirder error in secs_to_begin_of_shift!' if t < @end_time or t > @start_time
      @start_time - t
    else
      if t <= @start_time
        # earlier today: shift starts later today
        @start_time - t
      elsif t >= @end_time
        # already past today's shift: next start is tomorrow
        @start_time + 1.day - t
      else
        # TODO: else raise error
        raise 'Weird error in secs_to_begin_of_shift!'
      end
    end

    # need to convert to integer
    res.round
  end


  # Returns the shift length in seconds (Infinity for 24x7 shifts);
  # overnight shifts span across midnight.
  def duration
    return Infinity if @type == :all_day_long

    res = if @overnight
      1.day.to_i - @start_time + @end_time
    else
      @end_time - @start_time
    end

    # need to convert to integer
    res.round
  end


  # 24x7 work shift
  WORKSHIFT_24x7 = WorkShift.new(:all_day_long)

  # an infinitely large value
  Infinity = 1.0/0.0

  # the predefined work shift table: local 9AM-5PM shifts for several
  # timezones, expressed as UTC times on an arbitrary reference day
  WORKSHIFT_TABLE = {
    brt: { # UTC-3, 9AM to 5PM
      start_time: Time.utc(2000, 'Jan', 1, 12, 0, 0),
      end_time: Time.utc(2000, 'Jan', 1, 20, 0, 0),
    },
    brst: { # UTC-2, 9AM to 5PM
      start_time: Time.utc(2000, 'Jan', 1, 11, 0, 0),
      end_time: Time.utc(2000, 'Jan', 1, 19, 0, 0),
    },
    est: { # UTC-5, 9AM to 5PM
      start_time: Time.utc(2000, 'Jan', 1, 14, 0, 0),
      end_time: Time.utc(2000, 'Jan', 1, 22, 0, 0),
    },
    edt: { # UTC-4, 9AM to 5PM
      start_time: Time.utc(2000, 'Jan', 1, 13, 0, 0),
      end_time: Time.utc(2000, 'Jan', 1, 21, 0, 0),
    },
    cet: { # UTC+1, 9AM to 5PM
      start_time: Time.utc(2000, 'Jan', 1, 8, 0, 0),
      end_time: Time.utc(2000, 'Jan', 1, 16, 0, 0),
    },
    cest: { # UTC+2, 9AM to 5PM
      start_time: Time.utc(2000, 'Jan', 1, 7, 0, 0),
      end_time: Time.utc(2000, 'Jan', 1, 15, 0, 0),
    },
    ist: { # UTC+5:30, 9AM to 5PM
      start_time: Time.utc(2000, 'Jan', 1, 3, 30, 0),
      end_time: Time.utc(2000, 'Jan', 1, 11, 30, 0),
    },
  }
end
158
+ end
@@ -0,0 +1,29 @@
1
+ # coding: utf-8
2
# Make lib/ loadable so we can read the version constant without
# installing the gem first.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'symian/version'

Gem::Specification.new do |spec|
  spec.name          = 'symian'
  spec.version       = Symian::VERSION
  spec.authors       = ['Mauro Tortonesi']
  spec.email         = ['mauro.tortonesi@unife.it']
  spec.description   = %q{A Decision Support Tool for the Performance Optimization of IT Support Organizations}
  spec.summary       = %q{A support tool for strategic and business-driven decision making in the performance optimization of the IT incident management process}
  spec.homepage      = 'https://github.com/mtortonesi/symian'
  spec.license       = 'MIT'

  # Package every git-tracked file except .gitignore; requires the spec
  # to be evaluated from inside the git checkout.
  spec.files         = `git ls-files`.split($/).reject{|x| x == '.gitignore' }
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ['lib']

  # runtime dependencies
  spec.add_dependency 'activesupport', '~> 4.0.0'
  spec.add_dependency 'awesome_print', '~> 1.2.0'
  spec.add_dependency 'erv', '~> 0.0.2'
  spec.add_dependency 'ice_nine', '~> 0.11.0'

  # development-only dependencies
  spec.add_development_dependency 'bundler', '~> 1.6.2'
  spec.add_development_dependency 'rake', '~> 10.1.1'
  spec.add_development_dependency 'minitest-spec-context', '~> 0.0.3'
end
+ end