churn_vs_complexity 1.2.0 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +11 -0
  3. data/README.md +19 -2
  4. data/TODO +11 -0
  5. data/bin/churn_vs_complexity +5 -0
  6. data/lib/churn_vs_complexity/churn.rb +7 -2
  7. data/lib/churn_vs_complexity/cli.rb +21 -5
  8. data/lib/churn_vs_complexity/complexity/eslint_calculator.rb +34 -0
  9. data/lib/churn_vs_complexity/complexity/flog_calculator.rb +7 -6
  10. data/lib/churn_vs_complexity/complexity/pmd_calculator.rb +5 -2
  11. data/lib/churn_vs_complexity/complexity.rb +1 -0
  12. data/lib/churn_vs_complexity/concurrent_calculator.rb +2 -4
  13. data/lib/churn_vs_complexity/config.rb +81 -17
  14. data/lib/churn_vs_complexity/file_selector.rb +12 -1
  15. data/lib/churn_vs_complexity/git_date.rb +8 -1
  16. data/lib/churn_vs_complexity/serializer/csv.rb +14 -0
  17. data/lib/churn_vs_complexity/serializer/graph.rb +24 -0
  18. data/lib/churn_vs_complexity/serializer/pass_through.rb +21 -0
  19. data/lib/churn_vs_complexity/serializer/summary.rb +27 -0
  20. data/lib/churn_vs_complexity/serializer/summary_hash.rb +54 -0
  21. data/lib/churn_vs_complexity/serializer/timetravel/quality_calculator.rb +38 -0
  22. data/lib/churn_vs_complexity/serializer/timetravel/stats_calculator.rb +60 -0
  23. data/lib/churn_vs_complexity/serializer/timetravel.rb +103 -0
  24. data/lib/churn_vs_complexity/serializer.rb +7 -60
  25. data/lib/churn_vs_complexity/timetravel/traveller.rb +66 -0
  26. data/lib/churn_vs_complexity/timetravel/worktree.rb +56 -0
  27. data/lib/churn_vs_complexity/timetravel.rb +70 -0
  28. data/lib/churn_vs_complexity/version.rb +1 -1
  29. data/lib/churn_vs_complexity.rb +2 -0
  30. data/package-lock.json +6 -0
  31. data/tmp/eslint-support/complexity-calculator.js +51 -0
  32. data/tmp/eslint-support/package.json +11 -0
  33. data/tmp/template/graph.html +1 -4
  34. data/tmp/template/timetravel_graph.html +100 -0
  35. data/tmp/test-support/javascript/complex.js +43 -0
  36. data/tmp/test-support/javascript/moderate.js +12 -0
  37. data/tmp/test-support/javascript/simple.js +5 -0
  38. data/tmp/test-support/javascript/typescript-example.ts +26 -0
  39. data/tmp/timetravel/.keep +0 -0
  40. metadata +24 -2
data/lib/churn_vs_complexity/serializer/timetravel/quality_calculator.rb ADDED
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module ChurnVsComplexity
+  module Serializer
+    module Timetravel
+      EPSILON = 0.0001
+
+      class QualityCalculator
+        def initialize(min_churn:, max_churn:, min_complexity:, max_complexity:)
+          @min_churn = min_churn
+          @max_churn = max_churn
+          @min_complexity = min_complexity
+          @max_complexity = max_complexity
+        end
+
+        def alpha_score(raw_churn, raw_complexity)
+          # harmonic mean of normalised churn and complexity
+          churn = normalise(raw_churn, @min_churn, @max_churn, EPSILON)
+          complexity = normalise(raw_complexity, @min_complexity, @max_complexity, EPSILON)
+
+          (2 * churn * complexity) / (churn + complexity)
+        end
+
+        def beta_score(raw_churn, raw_complexity)
+          # geometric mean of normalised churn and complexity
+          churn = normalise(raw_churn, @min_churn, @max_churn, EPSILON)
+          complexity = normalise(raw_complexity, @min_complexity, @max_complexity, EPSILON)
+
+          Math.sqrt(churn * complexity)
+        end
+
+        private
+
+        def normalise(score, min, max, epsilon) = (score + epsilon - min) / (epsilon + max - min)
+      end
+    end
+  end
+end
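The two scores blend the same pair of normalised inputs in different ways: the harmonic mean (alpha) drags the score down when either dimension is low, while the geometric mean (beta) is slightly more forgiving. A minimal usage sketch with made-up extrema (in practice StatsCalculator derives them from the observed series):

    calc = ChurnVsComplexity::Serializer::Timetravel::QualityCalculator.new(
      min_churn: 1, max_churn: 10, min_complexity: 2, max_complexity: 20,
    )
    calc.alpha_score(5, 8) # => ~0.381, harmonic mean of ~0.444 and ~0.333
    calc.beta_score(5, 8)  # => ~0.385, geometric mean of the same pair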
data/lib/churn_vs_complexity/serializer/timetravel/stats_calculator.rb ADDED
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+module ChurnVsComplexity
+  module Serializer
+    module Timetravel
+      class StatsCalculator
+        # ['some_sha', { 'end_date' => '2024-01-01', 'values' => [[1, 2], [3, 4]] }]
+        def summaries(result)
+          observations = result.sort_by do |_sha, summary|
+            summary['end_date']
+          end.map { |entry| entry[1] }
+
+          quality_calculator = QualityCalculator.new(**extrema(observations))
+          observations.map do |o|
+            end_date = o['end_date']
+            scores = o['values'].map do |(churn, complexity)|
+              alpha = quality_calculator.alpha_score(churn, complexity)
+              beta = quality_calculator.beta_score(churn, complexity)
+              [churn, complexity, alpha, beta]
+            end
+            {
+              'end_date' => end_date,
+              'mean_churn' => mean(scores.map { |s| s[0] }),
+              'median_churn' => median(scores.map { |s| s[0] }),
+              'mean_complexity' => mean(scores.map { |s| s[1] }),
+              'median_complexity' => median(scores.map { |s| s[1] }),
+              'mean_alpha_score' => mean(scores.map { |s| s[2] }),
+              'median_alpha_score' => median(scores.map { |s| s[2] }),
+              'mean_beta_score' => mean(scores.map { |s| s[3] }),
+              'median_beta_score' => median(scores.map { |s| s[3] }),
+            }
+          end
+        end
+
+        private
+
+        def extrema(observations)
+          churn_series = observations.flat_map { |o| o['values'] }.map { |(churn, _)| churn }
+          max_churn = churn_series.max
+          min_churn = churn_series.min
+
+          complexity_series = observations.flat_map { |o| o['values'] }.map { |(_, complexity)| complexity }
+          max_complexity = complexity_series.max
+          min_complexity = complexity_series.min
+
+          { max_churn:, min_churn:, max_complexity:, min_complexity: }
+        end
+
+        def mean(series)
+          series.sum / series.size
+        end
+
+        def median(series)
+          sorted = series.sort
+          sorted[sorted.size / 2]
+        end
+      end
+    end
+  end
+end
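Two quirks of the private helpers are worth noting: mean uses series.sum / series.size, which is Integer division when every element is an Integer, and median picks the upper of the two middle elements for an even-sized series instead of interpolating. The equivalent plain-Ruby arithmetic:

    [1, 2].sum / [1, 2].size  # => 1, not 1.5 (Integer division)
    [1.0, 2.0].sum / 2        # => 1.5
    [1, 2, 3, 4].sort[4 / 2]  # => 3 (upper middle, no interpolation)

The alpha and beta series are always Floats, so in practice only the raw churn and complexity means can be affected.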
data/lib/churn_vs_complexity/serializer/timetravel.rb ADDED
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require_relative 'timetravel/quality_calculator'
+require_relative 'timetravel/stats_calculator'
+
+module ChurnVsComplexity
+  module Serializer
+    module Timetravel
+      def self.summaries(result)
+        StatsCalculator.new.summaries(result)
+      end
+
+      def self.resolve(serializer:, git_period:, relative_period:, jump_days:)
+        case serializer
+        when :csv
+          CSV
+        when :graph
+          Graph.new(git_period:, relative_period:, jump_days:)
+        end
+      end
+
+      module CSV
+        def self.serialize(result)
+          summaries = Timetravel.summaries(result)
+
+          # 2. Add title row to front of summaries
+          summaries.unshift(
+            {
+              'end_date' => 'Date',
+              'mean_churn' => 'Mean Churn',
+              'median_churn' => 'Median Churn',
+              'mean_complexity' => 'Mean Complexity',
+              'median_complexity' => 'Median Complexity',
+              'mean_alpha_score' => 'Mean Alpha Score',
+              'median_alpha_score' => 'Median Alpha Score',
+              'mean_beta_score' => 'Mean Beta Score',
+              'median_beta_score' => 'Median Beta Score',
+            },
+          )
+
+          # 3. convert to csv
+          summaries.map do |summary|
+            "#{summary['end_date']},#{summary['mean_churn']},#{summary['median_churn']},#{summary['mean_complexity']},#{summary['median_complexity']},#{summary['mean_alpha_score']},#{summary['median_alpha_score']},#{summary['mean_beta_score']},#{summary['median_beta_score']}"
+          end.join("\n")
+        end
+      end
+
+      # TODO: unit test
+      class Graph
+        def initialize(git_period:, relative_period:, jump_days:, template: Graph.load_template_file)
+          @template = template
+          @git_period = git_period
+          @relative_period = relative_period
+          @jump_days = jump_days
+        end
+
+        def self.load_template_file
+          file_path = File.expand_path('../../../tmp/template/timetravel_graph.html', __dir__)
+          File.read(file_path)
+        end
+
+        def serialize(result)
+          summaries = Timetravel.summaries(result)
+
+          data = summaries.map do |summary|
+            JSON.dump(summary)
+          end.join(",\n") + "\n"
+
+          @template.gsub("// INSERT DATA\n", data)
+                   .gsub('INSERT TITLE', title)
+                   .gsub('INSERT CHURN MODIFIER', churn_modifier)
+        end
+
+        private
+
+        def title
+          "#{churn_modifier}churn and complexity since #{since} evaluated every #{@jump_days} days"
+        end
+
+        def since
+          if @git_period.requested_start_date.nil?
+            'start of project'
+          else
+            @git_period.effective_start_date.strftime('%Y-%m-%d').to_s
+          end
+        end
+
+        def churn_modifier
+          case @relative_period
+          when :month
+            'Monthly '
+          when :quarter
+            'Quarterly '
+          when :year
+            'Yearly '
+          else
+            ''
+          end
+        end
+      end
+    end
+  end
+end
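With the header row unshifted in front, CSV.serialize yields one line per snapshot in date order, e.g. (illustrative values):

    Date,Mean Churn,Median Churn,Mean Complexity,Median Complexity,Mean Alpha Score,Median Alpha Score,Mean Beta Score,Median Beta Score
    2024-01-01,2.0,2,3.5,4,0.42,0.40,0.44,0.41
    2024-01-31,2.5,2,3.0,3,0.38,0.37,0.40,0.39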
data/lib/churn_vs_complexity/serializer.rb CHANGED
@@ -1,5 +1,12 @@
 # frozen_string_literal: true
 
+require_relative 'serializer/timetravel'
+require_relative 'serializer/summary_hash'
+require_relative 'serializer/summary'
+require_relative 'serializer/csv'
+require_relative 'serializer/graph'
+require_relative 'serializer/pass_through'
+
 module ChurnVsComplexity
   module Serializer
     def self.title(result)
@@ -15,65 +22,5 @@ module ChurnVsComplexity
     module None
       def self.serialize(result) = result
     end
-
-    module Summary
-      def self.serialize(result)
-        values_by_file = result[:values_by_file]
-        churn_values = values_by_file.map { |_, values| values[0].to_f }
-        complexity_values = values_by_file.map { |_, values| values[1].to_f }
-
-        mean_churn = churn_values.sum / churn_values.size
-        median_churn = churn_values.sort[churn_values.size / 2]
-        mean_complexity = complexity_values.sum / complexity_values.size
-        median_complexity = complexity_values.sort[complexity_values.size / 2]
-
-        product = values_by_file.map { |_, values| values[0].to_f * values[1].to_f }
-        mean_product = product.sum / product.size
-        median_product = product.sort[product.size / 2]
-
-        <<~SUMMARY
-          #{Serializer.title(result)}
-
-          Number of observations: #{values_by_file.size}
-
-          Churn:
-          Mean #{mean_churn}, Median #{median_churn}
-
-          Complexity:
-          Mean #{mean_complexity}, Median #{median_complexity}
-
-          Product of churn and complexity:
-          Mean #{mean_product}, Median #{median_product}
-        SUMMARY
-      end
-    end
-
-    module CSV
-      def self.serialize(result)
-        values_by_file = result[:values_by_file]
-        values_by_file.map do |file, values|
-          "#{file},#{values[0]},#{values[1]}\n"
-        end.join
-      end
-    end
-
-    class Graph
-      def initialize(template: Graph.load_template_file)
-        @template = template
-      end
-
-      def serialize(result)
-        data = result[:values_by_file].map do |file, values|
-          "{ file_path: '#{file}', churn: #{values[0]}, complexity: #{values[1]} }"
-        end.join(",\n") + "\n"
-        title = Serializer.title(result)
-        @template.gsub("// INSERT DATA\n", data).gsub('INSERT TITLE', title)
-      end
-
-      def self.load_template_file
-        file_path = File.expand_path('../../tmp/template/graph.html', __dir__)
-        File.read(file_path)
-      end
-    end
   end
 end
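The Summary, CSV, and Graph serializers deleted above are not gone: they evidently reappear in the new per-serializer files listed at the top of the diff (data/lib/churn_vs_complexity/serializer/summary.rb, csv.rb, graph.rb, and friends); only the None pass-through stays in this file.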
data/lib/churn_vs_complexity/timetravel/traveller.rb ADDED
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+module ChurnVsComplexity
+  # TODO: unit test and integration test
+  module Timetravel
+    class Traveller
+      def initialize(since:, relative_period:, engine:, serializer:, jump_days:, factory: Factory)
+        @relative_period = relative_period
+        @engine = engine
+        @jump_days = jump_days
+        @serializer = serializer
+        @git_period = GitDate.git_period(since, Time.now.to_date)
+        @factory = factory
+      end
+
+      def go(folder:)
+        git_strategy = @factory.git_strategy(folder:)
+        commits = git_strategy.resolve_commits_with_interval(git_period: @git_period, jump_days: @jump_days)
+
+        chunked = make_chunks(commits)
+        work_on(chunked:, folder:, git_strategy:)
+        combined = chunked.map { |c_and_p| read_result(c_and_p[:pipe]) }.reduce({}, :merge)
+
+        serializer.serialize(combined)
+      end
+
+      private
+
+      def work_on(chunked:, folder:, git_strategy:)
+        chunked.map.with_index do |c_and_p, i|
+          worktree = @factory.worktree(root_folder: folder, git_strategy:, number: i)
+          worktree.prepare
+          schedule_work(worktree:, **c_and_p)
+        end
+      end
+
+      def make_chunks(commits)
+        chunk_size = (commits.size / 3.0).ceil
+        commits.each_slice(chunk_size).map do |chunk|
+          { chunk:, pipe: @factory.pipe }
+        end.to_a
+      end
+
+      def read_result(pipe)
+        part = begin
+          JSON.parse(pipe[0].gets)
+        rescue StandardError => e
+          warn "Error parsing JSON: #{e}"
+          {}
+        end
+        pipe.each(&:close)
+        part
+      end
+
+      def schedule_work(chunk:, worktree:, pipe:)
+        @factory.worker(engine: @engine, worktree:)
+                .schedule(chunk:, pipe:)
+      end
+
+      def serializer
+        @factory.serializer(serializer: @serializer, git_period: @git_period,
+                            relative_period: @relative_period, jump_days: @jump_days,)
+      end
+    end
+  end
+end
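Traveller fans the resolved commits out into three roughly equal chunks, forks one worker per chunk, and then blocks on each pipe's read end for a single line of JSON. A stripped-down sketch of that parent/child pattern, independent of the gem's classes:

    require 'json'

    pipe = IO.pipe
    fork do
      # child: do the work, report one JSON line, close the write end
      pipe[1].puts(JSON.dump({ 'abc123' => { 'end_date' => '2024-01-01', 'values' => [[1, 2]] } }))
      pipe[1].close
    end
    part = JSON.parse(pipe[0].gets) # parent blocks until the child reports
    pipe.each(&:close)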
data/lib/churn_vs_complexity/timetravel/worktree.rb ADDED
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'digest'
+require 'tmpdir'
+
+module ChurnVsComplexity
+  module Timetravel
+    class Worktree
+      attr_reader :folder
+
+      def initialize(root_folder:, git_strategy:, number:)
+        @root_folder = root_folder
+        @git_strategy = git_strategy
+        @number = number
+      end
+
+      def prepare
+        @folder = prepare_worktree
+      end
+
+      def checkout(sha)
+        raise Error, 'Worktree not prepared' if @folder.nil?
+
+        @git_strategy.checkout_in_worktree(@folder, sha)
+      end
+
+      def remove
+        raise Error, 'Worktree not prepared' if @folder.nil?
+
+        @git_strategy.remove_worktree(@folder)
+      end
+
+      private
+
+      def tt_folder
+        folder_hash = Digest::SHA256.hexdigest(@root_folder)[0..7]
+        File.join(Dir.tmpdir, 'churn_vs_complexity', 'timetravel', folder_hash)
+      end
+
+      def prepare_worktree
+        worktree_folder = File.join(tt_folder, "worktree_#{@number}")
+
+        unless File.directory?(worktree_folder)
+          begin
+            FileUtils.mkdir_p(worktree_folder)
+          rescue StandardError
+            nil
+          end
+          @git_strategy.add_worktree(worktree_folder)
+        end
+
+        worktree_folder
+      end
+    end
+  end
+end
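Worktree folders live under the system temp directory, keyed by a short hash of the repository path so concurrent runs against different repositories cannot collide. The path arithmetic in isolation (hash value illustrative):

    require 'digest'
    require 'tmpdir'

    folder_hash = Digest::SHA256.hexdigest('/path/to/repo')[0..7]
    File.join(Dir.tmpdir, 'churn_vs_complexity', 'timetravel', folder_hash, 'worktree_0')
    # => "/tmp/churn_vs_complexity/timetravel/3f786850/worktree_0", for example

Note that this file uses FileUtils.mkdir_p without requiring 'fileutils' itself; it relies on the constant having been loaded elsewhere by the time prepare runs.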
data/lib/churn_vs_complexity/timetravel.rb ADDED
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require_relative 'timetravel/traveller'
+require_relative 'timetravel/worktree'
+
+module ChurnVsComplexity
+  module Timetravel
+    class Factory
+      def self.git_strategy(folder:) = GitStrategy.new(folder:)
+      def self.pipe = IO.pipe
+      def self.worker(engine:, worktree:) = Worker.new(engine:, worktree:)
+      def self.worktree(root_folder:, git_strategy:, number:) = Worktree.new(root_folder:, git_strategy:, number:)
+      def self.serializer(**args) = Serializer::Timetravel.resolve(**args)
+    end
+
+    class Worker
+      def initialize(engine:, worktree:)
+        @engine = engine
+        @worktree = worktree
+      end
+
+      def schedule(chunk:, pipe:)
+        fork do
+          results = chunk.to_h do |commit|
+            sha = commit.sha
+            @worktree.checkout(sha)
+            result = @engine.check(folder: @worktree.folder)
+            [sha, result]
+          end
+          @worktree.remove
+          pipe[1].puts(JSON.dump(results))
+          pipe[1].close
+        end
+      end
+    end
+
+    class GitStrategy
+      def initialize(folder:)
+        @repo = Git.open(folder)
+        @folder = folder
+      end
+
+      def checkout_in_worktree(worktree_folder, sha)
+        command = "(cd #{worktree_folder} && git checkout #{sha}) > /dev/null 2>&1"
+        `#{command}`
+      end
+
+      def resolve_commits_with_interval(git_period:, jump_days:)
+        candidates = @repo.log(1_000_000).since(git_period.effective_start_date).until(git_period.end_date).to_a
+
+        commits_by_date = candidates.filter { |c| c.date.to_date >= git_period.effective_start_date }
+                                    .group_by { |c| c.date.to_date }
+
+        found_dates = GitDate.select_dates_with_at_least_interval(commits_by_date.keys, jump_days)
+
+        found_dates.map { |date| commits_by_date[date].max_by(&:date) }
+      end
+
+      def add_worktree(wt_folder)
+        command = "(cd #{@folder} && git worktree add -f #{wt_folder}) > /dev/null 2>&1"
+        `#{command}`
+      end
+
+      def remove_worktree(worktree_folder)
+        command = "(cd #{worktree_folder} && git worktree remove -f #{worktree_folder}) > /dev/null 2>&1"
+        `#{command}`
+      end
+    end
+  end
+end
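End to end, a time-travel run looks roughly like this (hypothetical sketch; engine stands for whichever complexity engine the gem's Config builds for the chosen language, and its construction is elided here):

    require 'churn_vs_complexity'

    traveller = ChurnVsComplexity::Timetravel::Traveller.new(
      since: Date.new(2024, 1, 1),
      relative_period: :month,
      engine: engine,
      serializer: :csv,
      jump_days: 30,
    )
    puts traveller.go(folder: '/path/to/repo')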
data/lib/churn_vs_complexity/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module ChurnVsComplexity
-  VERSION = '1.2.0'
+  VERSION = '1.4.0'
 end
data/lib/churn_vs_complexity.rb CHANGED
@@ -13,7 +13,9 @@ require_relative 'churn_vs_complexity/cli'
 require_relative 'churn_vs_complexity/config'
 require_relative 'churn_vs_complexity/serializer'
 require_relative 'churn_vs_complexity/git_date'
+require_relative 'churn_vs_complexity/timetravel'
 
 module ChurnVsComplexity
   class Error < StandardError; end
+  class ValidationError < Error; end
 end
data/package-lock.json ADDED
@@ -0,0 +1,6 @@
+{
+  "name": "churn_vs_complexity",
+  "lockfileVersion": 3,
+  "requires": true,
+  "packages": {}
+}
data/tmp/eslint-support/complexity-calculator.js ADDED
@@ -0,0 +1,51 @@
+import { ESLint } from 'eslint';
+
+import eslint from '@eslint/js';
+import tseslint from 'typescript-eslint';
+
+async function analyzeComplexity(files) {
+  const overrideConfig = tseslint.config(
+    eslint.configs.recommended,
+    ...tseslint.configs.recommended,
+    {
+      rules: {
+        'complexity': ['warn', 0],
+      },
+
+    }
+  );
+
+  const linter = new ESLint({
+    overrideConfigFile: true,
+    overrideConfig,
+    cwd: '/',
+  });
+
+  try {
+    const results = await linter.lintFiles(files);
+    const complexityResults = results.map(result => {
+      const messages = result.messages.filter(msg => msg.ruleId === 'complexity');
+      const complexity = messages.reduce((sum, msg) => {
+        const complexityValue = parseInt(msg.message.match(/\d+/)[0], 10);
+        return sum + complexityValue;
+      }, 0);
+
+      if (complexity === 0) {
+        console.error("File has no complexity", result);
+      }
+
+      return {
+        file: result.filePath,
+        complexity,
+      };
+    });
+
+    console.log(JSON.stringify(complexityResults));
+  } catch (error) {
+    console.error('Error during analysis:', error);
+    process.exit(1);
+  }
+}
+
+const files = JSON.parse(process.argv[2]);
+analyzeComplexity(files);
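Setting the complexity rule's threshold to 0 makes ESLint emit a warning for every function, and the script sums the first integer it finds in each warning message to get a per-file total. It takes a JSON-encoded array of absolute paths as its single argument and prints a JSON array on stdout, presumably what the new EslintCalculator parses on the Ruby side, e.g. (output illustrative):

    node complexity-calculator.js '["/abs/path/src/app.ts"]'
    # => [{"file":"/abs/path/src/app.ts","complexity":7}]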
data/tmp/eslint-support/package.json ADDED
@@ -0,0 +1,11 @@
+{
+  "type": "module",
+  "dependencies": {
+    "eslint-plugin-complexity": "^1.0.2",
+    "@eslint/js": "^9.11.1",
+    "@types/eslint__js": "^8.42.3",
+    "eslint": "^9.11.1",
+    "typescript": "^5.6.2",
+    "typescript-eslint": "^8.7.0"
+  }
+}
data/tmp/template/graph.html CHANGED
@@ -16,10 +16,7 @@
         ];
 
         // Extract data for Chart.js
-        const labels = dataPoints.map(point => point.file_path);
-        const churnData = dataPoints.map(point => point.churn);
-        const complexityData = dataPoints.map(point => point.complexity);
-
+        const labels = dataPoints.map(point => point.file_path);
         // Prepare data in Chart.js format
         const data = {
             labels: labels,
data/tmp/template/timetravel_graph.html ADDED
@@ -0,0 +1,100 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>INSERT TITLE</title>
+    <script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
+    <script src="https://cdn.jsdelivr.net/npm/chartjs-adapter-date-fns"></script>
+    <style>
+        body {
+            font-family: Arial, sans-serif;
+        }
+        h1 {
+            text-align: center;
+            font-size: 24px;
+            font-weight: bold;
+            margin-bottom: 20px;
+            color: #333;
+        }
+        canvas {
+            margin: 20px auto;
+        }
+    </style>
+</head>
+<body>
+    <h1>INSERT TITLE</h1>
+    <canvas id="complexityChart" width="800" height="400"></canvas>
+    <canvas id="churnChart" width="800" height="400"></canvas>
+    <canvas id="alphaScoreChart" width="800" height="400"></canvas>
+    <canvas id="betaScoreChart" width="800" height="400"></canvas>
+
+    <script>
+
+        const dataPoints = [
+            // INSERT DATA
+        ];
+
+        // Extract dates for x-axis
+        const labels = dataPoints.map(point => point.end_date);
+
+        // Function to create a dataset
+        function createDataset(label, data, color) {
+            return {
+                label: label,
+                data: data,
+                borderColor: color,
+                backgroundColor: color,
+                fill: false,
+                tension: 0.1
+            };
+        }
+
+        // Function to create a chart
+        function createChart(ctx, title, datasets) {
+            return new Chart(ctx, {
+                type: 'line',
+                data: { labels: labels, datasets: datasets },
+                options: {
+                    responsive: true,
+                    plugins: {
+                        title: { display: true, text: title }
+                    },
+                    scales: {
+                        x: { type: 'time', time: { parser: 'yyyy-MM-dd', tooltipFormat: 'll' } },
+                        y: { beginAtZero: true }
+                    }
+                }
+            });
+        }
+
+        // Create Complexity Chart
+        const complexityCtx = document.getElementById('complexityChart').getContext('2d');
+        createChart(complexityCtx, 'Complexity Over Time', [
+            createDataset('Mean Complexity', dataPoints.map(p => ({ x: p.end_date, y: p.mean_complexity })), 'rgb(75, 192, 192)'),
+            createDataset('Median Complexity', dataPoints.map(p => ({ x: p.end_date, y: p.median_complexity })), 'rgb(255, 99, 132)')
+        ]);
+
+        // Create Churn Chart
+        const churnCtx = document.getElementById('churnChart').getContext('2d');
+        createChart(churnCtx, 'INSERT CHURN MODIFIERChurn Over Time', [
+            createDataset('Mean Churn', dataPoints.map(p => ({ x: p.end_date, y: p.mean_churn })), 'rgb(54, 162, 235)'),
+            createDataset('Median Churn', dataPoints.map(p => ({ x: p.end_date, y: p.median_churn })), 'rgb(255, 206, 86)')
+        ]);
+
+        // Create Alpha Score Chart
+        const alphaScoreCtx = document.getElementById('alphaScoreChart').getContext('2d');
+        createChart(alphaScoreCtx, 'Alpha Score Over Time', [
+            createDataset('Mean Alpha Score', dataPoints.map(p => ({ x: p.end_date, y: p.mean_alpha_score })), 'rgb(153, 102, 255)'),
+            createDataset('Median Alpha Score', dataPoints.map(p => ({ x: p.end_date, y: p.median_alpha_score })), 'rgb(255, 159, 64)')
+        ]);
+
+        // Create Beta Score Chart
+        const betaScoreCtx = document.getElementById('betaScoreChart').getContext('2d');
+        createChart(betaScoreCtx, 'Beta Score Over Time', [
+            createDataset('Mean Beta Score', dataPoints.map(p => ({ x: p.end_date, y: p.mean_beta_score })), 'rgb(153, 102, 255)'),
+            createDataset('Median Beta Score', dataPoints.map(p => ({ x: p.end_date, y: p.median_beta_score })), 'rgb(255, 159, 64)')
+        ]);
+    </script>
+</body>
+</html>
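The placeholders explain the odd-looking 'INSERT CHURN MODIFIERChurn Over Time' label: Graph#serialize (above) substitutes the JSON-dumped summaries for the '// INSERT DATA' line, the computed title for both 'INSERT TITLE' occurrences, and 'Monthly ', 'Quarterly ', 'Yearly ', or '' for 'INSERT CHURN MODIFIER', so the chart title renders as, for example, 'Monthly Churn Over Time'.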