xcpretty-bb 0.1.12.bb1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +19 -0
- data/.hound.yml +2 -0
- data/.kick +17 -0
- data/.rubocop.yml +239 -0
- data/.travis.yml +11 -0
- data/CHANGELOG.md +200 -0
- data/CONTRIBUTING.md +64 -0
- data/Gemfile +9 -0
- data/LICENSE.txt +61 -0
- data/README.md +93 -0
- data/Rakefile +26 -0
- data/assets/report.html.erb +172 -0
- data/bin/xcpretty +85 -0
- data/features/assets/RACCommandSpec, line 80, hello xcpretty.png +0 -0
- data/features/assets/apple_raw.png +0 -0
- data/features/custom_formatter.feature +15 -0
- data/features/fixtures/xcodebuild.log +5963 -0
- data/features/html_report.feature +54 -0
- data/features/json_compilation_database_report.feature +21 -0
- data/features/junit_report.feature +44 -0
- data/features/knock_format.feature +11 -0
- data/features/simple_format.feature +204 -0
- data/features/steps/formatting_steps.rb +330 -0
- data/features/steps/html_steps.rb +32 -0
- data/features/steps/json_steps.rb +37 -0
- data/features/steps/junit_steps.rb +39 -0
- data/features/steps/report_steps.rb +22 -0
- data/features/steps/xcpretty_steps.rb +31 -0
- data/features/support/env.rb +117 -0
- data/features/tap_format.feature +31 -0
- data/features/test_format.feature +49 -0
- data/features/xcpretty.feature +14 -0
- data/lib/xcpretty/ansi.rb +72 -0
- data/lib/xcpretty/formatters/formatter.rb +177 -0
- data/lib/xcpretty/formatters/knock.rb +35 -0
- data/lib/xcpretty/formatters/rspec.rb +33 -0
- data/lib/xcpretty/formatters/simple.rb +200 -0
- data/lib/xcpretty/formatters/tap.rb +40 -0
- data/lib/xcpretty/parser.rb +591 -0
- data/lib/xcpretty/printer.rb +24 -0
- data/lib/xcpretty/reporters/html.rb +98 -0
- data/lib/xcpretty/reporters/json_compilation_database.rb +62 -0
- data/lib/xcpretty/reporters/junit.rb +102 -0
- data/lib/xcpretty/snippet.rb +38 -0
- data/lib/xcpretty/syntax.rb +51 -0
- data/lib/xcpretty/term.rb +14 -0
- data/lib/xcpretty/version.rb +4 -0
- data/lib/xcpretty.rb +37 -0
- data/spec/fixtures/NSStringTests.m +64 -0
- data/spec/fixtures/constants.rb +600 -0
- data/spec/fixtures/custom_formatter.rb +18 -0
- data/spec/fixtures/oneliner.m +1 -0
- data/spec/fixtures/raw_kiwi_compilation_fail.txt +24 -0
- data/spec/fixtures/raw_kiwi_fail.txt +1896 -0
- data/spec/fixtures/raw_specta_fail.txt +3110 -0
- data/spec/spec_helper.rb +7 -0
- data/spec/support/matchers/colors.rb +21 -0
- data/spec/xcpretty/ansi_spec.rb +47 -0
- data/spec/xcpretty/formatters/formatter_spec.rb +140 -0
- data/spec/xcpretty/formatters/rspec_spec.rb +56 -0
- data/spec/xcpretty/formatters/simple_spec.rb +173 -0
- data/spec/xcpretty/parser_spec.rb +542 -0
- data/spec/xcpretty/printer_spec.rb +55 -0
- data/spec/xcpretty/snippet_spec.rb +46 -0
- data/spec/xcpretty/syntax_spec.rb +39 -0
- data/spec/xcpretty/term_spec.rb +26 -0
- data/xcpretty.gemspec +37 -0
- metadata +237 -0
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
# Cucumber steps covering the JSON compilation database reporter.
# Fix: use File.read instead of File.open(...).read, which leaked the
# file handle (the File object was never closed).

Given(/^some big input$/) do
  add_run_input File.read('features/fixtures/xcodebuild.log')
end

Then(/^I should have a JSON compilation database in a custom path$/) do
  step("I should have a JSON compilation database at \"#{custom_report_path}\"")
end

Then(/^I should have a JSON compilation database at "(.*?)"$/) do |path|
  json = JSON.parse(File.read(path))
  json.should_not be_nil
end

Then(/^I should have JSON compilation databases in two custom paths$/) do
  step("I should have a JSON compilation database at \"#{custom_report_path}\"")
  step("I should have a JSON compilation database at \"#{other_custom_report_path}\"")
end

Then(/^the JSON compilation database should contain an entry with a command$/) do
  json_db.length.should == 1
  json_db[0]['command'].should start_with('/Applications/Xcode.app/Contents/Developer')
  json_db[0]['command'].should end_with('.o')
end

Then(/^the JSON compilation database should contain an entry with a file$/) do
  json_db[0]['file'].should == '/Users/musalj/code/OSS/ObjectiveSugar/Classes/NSMutableArray+ObjectiveSugar.m'
end

Then(/^the JSON compilation database should contain an entry with a directory$/) do
  json_db[0]['directory'].should == '/'
end

Then(/^the JSON compilation database should be complete$/) do
  # Every entry must carry all three keys required by the clang
  # compilation-database format: command, file and directory.
  entries = json_db.select { |entry| entry['command'] && entry['file'] && entry['directory'] }
  entries.length.should == JSON_DB_FIXTURE_COMMAND_COUNT
end
|
|
37
|
+
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
# Cucumber steps covering the JUnit XML reporter.
# Fix: use File.read instead of File.open(...).read, which leaked the
# file handle (the File object was never closed).

Then(/^I should see a failed test node in my report$/) do
  # A failing testcase is represented by a child <failure> element.
  junit_report_root.elements.to_a.detect do |node|
    element = node.elements.to_a.first
    element && element.name == "failure"
  end.should_not be_nil
end

Then(/^I should see a passing test node in my report$/) do
  # Passing testcases carry a "time" attribute and no failure child.
  junit_report_root.elements.to_a.detect do |node|
    node.attributes["time"] != nil
  end.should_not be_nil
end

Then(/^I should see a pending test node in my report$/) do
  # Pending testcases are marked with a <skipped> child element.
  junit_report_root.elements.to_a.detect do |node|
    node.elements.to_a.detect { |child| child.name == 'skipped' }
  end.should_not be_nil
end

Then(/^I should see a test suite node$/) do
  junit_report_root.elements.to_a.first.should_not be_nil
end

Then(/^I should see (\d+) tests in my report$/) do |test_count|
  junit_report_root.attributes["tests"].should == test_count
  junit_report_root.elements.to_a.size.should == test_count.to_i
end

Then(/^I should see (\d+) test suites$/) do |count|
  suites = junit_report.root.elements.to_a
  suites.size.should == count.to_i
  suites.count { |s| s.name == 'testsuite' }.should == count.to_i
end

Then(/^I should have a test report at "(.*?)"$/) do |path|
  doc = REXML::Document.new(File.read(path))
  doc.root.should_not be_nil
end
|
|
39
|
+
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
# Cucumber steps shared by all report formats (custom output paths etc.).

Given(/^I have tests in my suite from 2 classes$/) do
  # Feed fixture output from two distinct test classes into the run input.
  add_run_input SAMPLE_OCUNIT_TEST
  add_run_input SAMPLE_KIWI_TEST
end

When(/^I pipe to xcpretty with "(.*?)" and specify a custom path$/) do |args|
  step(%(I pipe to xcpretty with "#{args} --output #{custom_report_path}"))
end

When(/^I pipe to xcpretty with two custom "(.*?)" report paths$/) do |type|
  flags = "--report #{type} --output #{custom_report_path} " \
          "--report #{type} --output #{other_custom_report_path}"
  step(%(I pipe to xcpretty with "#{flags}"))
end

Then(/^I should have test reports in two custom paths$/) do
  [custom_report_path, other_custom_report_path].each do |path|
    step(%(I should have a test report at "#{path}"))
  end
end

Then(/^I should have a test report in a custom path$/) do
  step(%(I should have a test report at "#{custom_report_path}"))
end
|
|
22
|
+
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
# Cucumber steps that shell out to the xcpretty executable directly.

When(/^I run xcpretty$/) do
  # Capture stderr too: the help banner goes there when no input is piped.
  @output = `bin/xcpretty 2>&1`
end

When(/^I run xcpretty with (.*)$/) do |flags|
  @output = `bin/xcpretty #{flags}`
end

When(/^I run xcpretty over a big file$/) do
  started_at = Time.now
  @output = `cat features/fixtures/xcodebuild.log | bin/xcpretty -c`
  @xcpretty_run_time = Time.now - started_at
end

Then(/^I should see the help banner$/) do
  run_output.should include("Usage: xcodebuild [options] | xcpretty")
end

Then(/^I should see the xcpretty version$/) do
  run_output.should include(XCPretty::VERSION)
end

Then(/^the exit status code should be (\d)$/) do |code|
  $?.exitstatus.should == code.to_i
end

Then(/^the performance should be way faster than running cat$/) do
  puts "XCPretty run time: #{@xcpretty_run_time}"
  @xcpretty_run_time.should < 2
end
|
|
31
|
+
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
# Cucumber environment for the xcpretty feature suite.
#
# Fixes:
# - File.open(...).read leaked file handles in html_report, json_db and
#   junit_report; replaced with File.read.
# - FileUtils was used in hooks/helpers without being required.

$LOAD_PATH.unshift File.expand_path('../../..', __FILE__)

require 'tempfile'
require 'fileutils'
require 'rexml/document'
require 'spec/fixtures/constants'
require 'spec/support/matchers/colors'
require 'lib/xcpretty/ansi'
require 'lib/xcpretty/version'
require 'lib/xcpretty/syntax'
require 'lib/xcpretty/formatters/formatter'
require 'lib/xcpretty/reporters/junit'
require 'lib/xcpretty/reporters/html'
require 'lib/xcpretty/reporters/json_compilation_database'

begin
  require 'json'
rescue LoadError
  # Fall back to the bundled pure-Ruby JSON parser on old Rubies.
  require 'vendor/json_pure/parser'
end

include XCPretty::ANSI

# Matchers for recognizing xcodebuild/test-runner output in assertions.
TEST_RUN_START_MATCHER = /Test Suite .+ started/
TEST_SUITE_COMPLETION_MATCHER = /Executed \d+ tests, with \d+ failures \(\d+ unexpected\) in \d+\.\d+ \(\d+\.\d+\) seconds/
TEST_SUITE_START_MATCHER = /[\w]*(Spec|Tests)$/
TEST_PATH_MATCHER = %r{[\w/\-\s]+:\d+}
PASSING_TEST_NAME_MATCHER = %r{\w+\s\(\d+\.\d+\sseconds\)}
PENDING_TEST_NAME_MATCHER = %r{\w+\s\[PENDING\]}
FAILING_TEST_NAME_MATCHER = %r{\w+, expected:}
MEASURING_TEST_NAME_MATCHER = %r{\w+\smeasured\s\(\d+\.\d+\sseconds\)}

# Number of compile commands in features/fixtures/xcodebuild.log.
JSON_DB_FIXTURE_COMMAND_COUNT = 557

# Writes the accumulated run input to a temp file and pipes it through
# bin/xcpretty with the given CLI flags, capturing stdout into @output.
def run_xcpretty(flags)
  input_file = Tempfile.new('xcpretty_input')
  File.write(input_file.path, run_input)
  @output = %x(cat '#{input_file.path}' | bin/xcpretty #{flags})
  input_file.unlink
end

# Appends a chunk of fixture output to the pending run input.
def add_run_input(text)
  run_input << "\n#{text}"
end

def run_input
  @input ||= ''
end

def run_output
  @output ||= ''
end

# Parsed HTML report. The DOCTYPE is stripped because REXML cannot
# parse the HTML5 doctype declaration.
def html_report
  @html_report ||= REXML::Document.new(
    File.read(XCPretty::HTML::FILEPATH).sub("<!DOCTYPE html>", "")
  )
end

def html_report_body
  html_report.root.get_elements('//body').first
end

# All elements inside #test-suites whose class includes 'test-suite'.
def html_test_suites
  parent = html_report_body.get_elements("//*[@id='test-suites']/").first
  parent.elements.to_a.select do |e|
    e.attributes['class'] && e.attributes['class'].include?('test-suite')
  end
end

# Parsed JSON compilation database written to the custom report path.
def json_db
  @json ||= JSON.parse(File.read(custom_report_path))
end

def junit_report
  REXML::Document.new(File.read(XCPretty::JUnit::FILEPATH))
end

def junit_report_root
  junit_report.root.elements.to_a.first
end

# Lazily-created temp file used as the first custom --output path.
def custom_report_path
  @custom_report_path ||= begin
    @custom_report_file1 = Tempfile.new('custom_report_path')
    @custom_report_file1.path
  end
end

# Lazily-created temp file used as the second custom --output path.
def other_custom_report_path
  @custom_report_path2 ||= begin
    @custom_report_file2 = Tempfile.new('custom_report_path')
    @custom_report_file2.path
  end
end

def copy_file_to_screenshot_dir(screenshot_file)
  @screenshot_file_path = "#{XCPretty::HTML::SCREENSHOT_DIR}/#{screenshot_file}"
  FileUtils.cp("features/assets/#{screenshot_file}", @screenshot_file_path)
end

Before do
  self.colorize = true
end

After do
  # Reset per-scenario state and remove any generated report artifacts.
  @input = ""
  @output = ""
  @custom_report_file1.unlink if @custom_report_file1
  @custom_report_file2.unlink if @custom_report_file2
  @html_report = nil
  @json = nil
  FileUtils.rm_rf(XCPretty::JUnit::FILEPATH)
  FileUtils.rm_rf(XCPretty::HTML::FILEPATH)
  FileUtils.rm_rf(XCPretty::JSONCompilationDatabase::FILE_PATH)
  File.delete(@screenshot_file_path) if @screenshot_file_path
end
|
|
117
|
+
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
Feature: Showing Test Anything Protocol-compatible test output
|
|
2
|
+
|
|
3
|
+
Scenario: Showing failed tests
|
|
4
|
+
Given I have a failing test in my suite
|
|
5
|
+
When I pipe to xcpretty with "--tap"
|
|
6
|
+
Then I should see text beginning with "not ok 1"
|
|
7
|
+
|
|
8
|
+
Scenario: Showing passing tests
|
|
9
|
+
Given I have a passing test in my suite
|
|
10
|
+
When I pipe to xcpretty with "--tap"
|
|
11
|
+
Then I should see text beginning with "ok 1"
|
|
12
|
+
|
|
13
|
+
Scenario: Showing pending tests
|
|
14
|
+
Given I have a pending test in my suite
|
|
15
|
+
When I pipe to xcpretty with "--tap"
|
|
16
|
+
Then I should see text containing " # TODO" and beginning with "not ok 1"
|
|
17
|
+
|
|
18
|
+
Scenario: Showing how many tests completed
|
|
19
|
+
Given I have a pending test in my suite
|
|
20
|
+
And I have a passing test in my suite
|
|
21
|
+
And I have a failing test in my suite
|
|
22
|
+
And the test suite has finished
|
|
23
|
+
When I pipe to xcpretty with "--tap"
|
|
24
|
+
Then I should see text matching "1..3"
|
|
25
|
+
|
|
26
|
+
Scenario: Showing a running test counter
|
|
27
|
+
Given I have a passing test in my suite
|
|
28
|
+
And I have a failing test in my suite
|
|
29
|
+
When I pipe to xcpretty with "--tap"
|
|
30
|
+
Then I should see text beginning with "ok 1"
|
|
31
|
+
And I should see text beginning with "not ok 2"
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
Feature: Showing RSpec-style test output
|
|
2
|
+
|
|
3
|
+
Scenario: Showing failed tests
|
|
4
|
+
Given I have a failing test in my suite
|
|
5
|
+
When I pipe to xcpretty with "--test --no-color"
|
|
6
|
+
Then I should see a failed test icon
|
|
7
|
+
|
|
8
|
+
Scenario: Showing passing tests
|
|
9
|
+
Given I have a passing test in my suite
|
|
10
|
+
When I pipe to xcpretty with "--test --no-color"
|
|
11
|
+
Then I should see a passing test icon in ASCII
|
|
12
|
+
|
|
13
|
+
Scenario: Showing pending tests
|
|
14
|
+
Given I have a pending test in my suite
|
|
15
|
+
When I pipe to xcpretty with "--test --no-color"
|
|
16
|
+
Then I should see a pending test icon in ASCII
|
|
17
|
+
|
|
18
|
+
Scenario: Showing measuring tests
|
|
19
|
+
Given I have a measuring test in my suite
|
|
20
|
+
When I pipe to xcpretty with "--test --no-color"
|
|
21
|
+
Then I should see a measuring test icon in ASCII
|
|
22
|
+
|
|
23
|
+
Scenario: Showing some tests failed with color
|
|
24
|
+
Given I have a failing test in my suite
|
|
25
|
+
And the test suite has finished
|
|
26
|
+
When I pipe to xcpretty with "--test --color"
|
|
27
|
+
Then I should see a red failed test icon
|
|
28
|
+
And I should see the path of a failed test
|
|
29
|
+
And the final execution message should be red
|
|
30
|
+
|
|
31
|
+
Scenario: Showing passing tests with color
|
|
32
|
+
Given I have a passing test in my suite
|
|
33
|
+
When I pipe to xcpretty with "--test --color"
|
|
34
|
+
Then I should see a green passing test icon
|
|
35
|
+
|
|
36
|
+
Scenario: Showing pending tests with color
|
|
37
|
+
Given I have a pending test in my suite
|
|
38
|
+
When I pipe to xcpretty with "--test --color"
|
|
39
|
+
Then I should see a yellow pending test icon
|
|
40
|
+
|
|
41
|
+
Scenario: Showing measuring tests with color
|
|
42
|
+
Given I have a measuring test in my suite
|
|
43
|
+
When I pipe to xcpretty with "--test --color"
|
|
44
|
+
Then I should see a yellow measuring test icon
|
|
45
|
+
|
|
46
|
+
Scenario: Showing that all tests passed with color
|
|
47
|
+
Given all of my tests will pass in my suite
|
|
48
|
+
When I pipe to xcpretty with "--test --color"
|
|
49
|
+
Then the final execution message should be green
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
Feature: CLI behavior
|
|
2
|
+
|
|
3
|
+
Scenario: Starting xcpretty without any flags
|
|
4
|
+
When I run xcpretty
|
|
5
|
+
Then I should see the help banner
|
|
6
|
+
|
|
7
|
+
Scenario: Starting xcpretty with version
|
|
8
|
+
When I run xcpretty with -v
|
|
9
|
+
Then I should see the xcpretty version
|
|
10
|
+
|
|
11
|
+
Scenario: Performance
|
|
12
|
+
When I run xcpretty over a big file
|
|
13
|
+
Then the performance should be way faster than running cat
|
|
14
|
+
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
module XCPretty

  # Mixin providing ANSI terminal coloring. Coloring is applied only
  # while #colorize is truthy; otherwise text passes through untouched.
  module ANSI

    attr_accessor :colorize

    # Captures: [1] color code, [2] optional effect code, [3] payload text.
    FORMATTED_MATCHER = %r{\e\[(\d+)[;]?(\d+)?m(.*)\e\[0m}

    EFFECT = {
      reset: '0',
      bold: '1',
      underline: '4'
    }

    COLORS = {
      black: '30',
      red: '31',
      green: '32',
      yellow: '33',
      blue: '34',
      cyan: '36',
      white: '37',
      plain: '39'
    }

    def colorize?
      !!@colorize
    end

    # "White" is rendered as bold default-foreground text.
    def white(text)
      ansi_parse(text, :plain, :bold)
    end

    def red(text)
      ansi_parse(text, :red)
    end

    def green(text)
      ansi_parse(text, :green, :bold)
    end

    def cyan(text)
      ansi_parse(text, :cyan)
    end

    def yellow(text)
      ansi_parse(text, :yellow)
    end

    # Returns the symbolic color/effect names applied to a formatted
    # string, e.g. [:green, :bold]. Empty array for unformatted text.
    def applied_effects(text)
      match = FORMATTED_MATCHER.match(text)
      return [] unless match

      [COLORS.invert[match[1]], EFFECT.invert[match[2]]].compact
    end

    # Removes ANSI formatting, returning the bare payload text.
    def strip(text)
      match = FORMATTED_MATCHER.match(text)
      match ? match[3] : text
    end

    # Wraps text in ANSI escape codes for the given color and optional
    # effect. Returns text unchanged when colorization is disabled.
    def ansi_parse(text, color, effect = nil)
      return text unless colorize?

      code = COLORS[color] || ''
      code += ";#{EFFECT[effect]}" if EFFECT[effect]
      "\e[#{code}m#{text}\e[#{EFFECT[:reset]}m"
    end
  end
end
|
|
72
|
+
|
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
# encoding: utf-8
require 'xcpretty/ansi'
require 'xcpretty/parser'

module XCPretty

  # Making a new formatter is easy.
  # Just make a subclass of Formatter, and override any of these methods.
  # Every hook defaults to returning an empty string (i.e. print nothing).
  module FormatMethods
    EMPTY = ''.freeze

    def format_analyze(file_name, file_path); EMPTY; end
    def format_build_target(target, project, configuration); EMPTY; end
    def format_analyze_target(target, project, configuration); EMPTY; end
    def format_check_dependencies; EMPTY; end
    def format_clean(project, target, configuration); EMPTY; end
    def format_clean_target(target, project, configuration); EMPTY; end
    def format_clean_remove; EMPTY; end
    def format_compile(file_name, file_path); EMPTY; end
    def format_compile_command(compiler_command, file_path); EMPTY; end
    def format_compile_storyboard(file_name, file_path); EMPTY; end
    def format_compile_xib(file_name, file_path); EMPTY; end
    def format_copy_header_file(source, target); EMPTY; end
    def format_copy_plist_file(source, target); EMPTY; end
    def format_copy_strings_file(file_name); EMPTY; end
    def format_cpresource(file); EMPTY; end
    def format_generate_dsym(dsym); EMPTY; end
    def format_linking(file, build_variant, arch); EMPTY; end
    def format_libtool(library); EMPTY; end
    def format_passing_test(suite, test, time); EMPTY; end
    def format_pending_test(suite, test); EMPTY; end
    def format_measuring_test(suite, test, time); EMPTY; end
    def format_failing_test(suite, test, time, file_path); EMPTY; end
    def format_process_pch(file); EMPTY; end
    def format_process_pch_command(file_path); EMPTY; end
    def format_phase_success(phase_name); EMPTY; end
    def format_phase_script_execution(phase_name); EMPTY; end
    def format_phase_script_error(error, text); EMPTY; end
    def format_process_info_plist(file_name, file_path); EMPTY; end
    def format_codesign(file); EMPTY; end
    def format_preprocess(file); EMPTY; end
    def format_pbxcp(file); EMPTY; end
    def format_shell_command(command, arguments); EMPTY; end
    def format_test_run_started(name); EMPTY; end
    def format_test_run_finished(name, time); EMPTY; end
    def format_test_suite_started(name); EMPTY; end
    def format_test_summary(message, failures_per_suite); EMPTY; end
    def format_touch(file_path, file_name); EMPTY; end
    def format_tiffutil(file); EMPTY; end
    def format_write_file(file); EMPTY; end
    def format_write_auxiliary_files; EMPTY; end

    # COMPILER / LINKER ERRORS AND WARNINGS
    def format_compile_error(file_name, file_path, reason,
                             line, cursor); EMPTY; end
    def format_error(message); EMPTY; end
    def format_file_missing_error(error, file_path); EMPTY; end
    def format_undefined_symbols(message, symbol, reference); EMPTY; end
    def format_duplicate_symbols(message, file_paths); EMPTY; end
    def format_warning(message); message; end

    # TODO: see how we can unify format_error and format_compile_error,
    # the same for warnings
    def format_compile_warning(file_name, file_path, reason,
                               line, cursor); EMPTY; end
  end

  # Base formatter: wires a Parser to the FormatMethods hooks and
  # supplies default rendering for errors, warnings and test summaries.
  class Formatter

    include ANSI
    include FormatMethods

    attr_reader :parser

    def initialize(use_unicode, colorize)
      @use_unicode = use_unicode
      @colorize = colorize
      @parser = Parser.new(self)
    end

    # Override if you want to catch something specific with your regex
    def pretty_format(text)
      parser.parse(text)
    end

    # If you want to print inline, override #optional_newline with ''
    def optional_newline
      "\n"
    end

    def use_unicode?
      !!@use_unicode
    end

    # Will be printed by default. Override with '' if you don't want summary
    def format_test_summary(executed_message, failures_per_suite)
      failures = format_failures(failures_per_suite)
      final_message =
        failures.empty? ? green(executed_message) : red(executed_message)

      "\n\n#{[failures, final_message].join("\n\n\n").strip}"
    end

    ERROR = '❌ '
    ASCII_ERROR = '[x]'

    WARNING = '⚠️ '
    ASCII_WARNING = '[!]'

    def format_error(message)
      "\n#{red(error_symbol + " " + message)}\n\n"
    end

    def format_compile_error(file, file_path, reason, line, cursor)
      "\n#{red(error_symbol + " ")}#{file_path}: #{red(reason)}\n\n" \
      "#{line}\n#{cyan(cursor)}\n\n"
    end

    def format_file_missing_error(reason, file_path)
      "\n#{red(error_symbol + " " + reason)} #{file_path}\n\n"
    end

    def format_compile_warning(file, file_path, reason, line, cursor)
      "\n#{yellow(warning_symbol + ' ')}#{file_path}: #{yellow(reason)}\n\n" \
      "#{line}\n#{cyan(cursor)}\n\n"
    end

    def format_ld_warning(reason)
      "#{yellow(warning_symbol + ' ' + reason)}"
    end

    def format_undefined_symbols(message, symbol, reference)
      "\n#{red(error_symbol + " " + message)}\n" \
      "> Symbol: #{symbol}\n" \
      "> Referenced from: #{reference}\n\n"
    end

    def format_duplicate_symbols(message, file_paths)
      "\n#{red(error_symbol + " " + message)}\n" \
      "> #{file_paths.map { |path| path.split('/').last }.join("\n> ")}\n"
    end


    private

    # One section per suite: suite name followed by its failures.
    def format_failures(failures_per_suite)
      failures_per_suite.map do |suite, failures|
        rendered = failures.map { |failure| format_failure(failure) }.join("\n\n")
        "\n#{suite}\n#{rendered}"
      end.join("\n")
    end

    # Renders a single failure with a syntax-highlighted code snippet.
    def format_failure(f)
      "    #{f[:test_case]}, #{red(f[:reason])}\n    #{cyan(f[:file_path])}\n" \
      "    ```\n" +
      Syntax.highlight(Snippet.from_filepath(f[:file_path])) +
      "    ```"
    end

    def error_symbol
      use_unicode? ? ERROR : ASCII_ERROR
    end

    def warning_symbol
      use_unicode? ? WARNING : ASCII_WARNING
    end

  end
end
|
|
177
|
+
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
module XCPretty

  # Knock-compatible formatter: emits minimal "ok"/"not ok" lines,
  # with failure details as "#"-prefixed diagnostic lines.
  class Knock < Formatter

    FAIL = 'not ok'
    PASS = 'ok'

    def format_passing_test(suite, test_case, time)
      "#{PASS} - #{test_case}"
    end

    def format_failing_test(test_suite, test_case, reason, file)
      diagnostics = format_failure_diagnostics(test_suite, test_case, reason, file)
      "#{FAIL} - #{test_case}: FAILED" + diagnostics
    end

    # Knock output has no trailing summary.
    def format_test_summary(executed_message, failures_per_suite)
      ''
    end

    def format_failure_diagnostics(test_suite, test_case, reason, file)
      format_diagnostics(reason) +
      format_diagnostics("  #{file}: #{test_suite} - #{test_case}")
    end

    private

    # Diagnostic lines are prefixed with "# " per the Knock convention.
    def format_diagnostics(text)
      "\n# #{text}"
    end

  end

end
|
|
35
|
+
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
module XCPretty

  # RSpec-style dot formatter: one colored character per test result,
  # printed inline (no newline between results).
  class RSpec < Formatter

    FAIL = 'F'
    PASS = '.'
    PENDING = 'P'
    MEASURING = 'T'

    # Results are emitted on one continuous line.
    def optional_newline
      ''
    end

    def format_passing_test(suite, test_case, time)
      green(PASS)
    end

    def format_failing_test(test_suite, test_case, reason, file)
      red(FAIL)
    end

    def format_pending_test(suite, test_case)
      yellow(PENDING)
    end

    def format_measuring_test(suite, test_case, time)
      yellow(MEASURING)
    end

  end

end
|
|
33
|
+
|