bddfire 1.9.7 → 1.9.8

This diff shows the content of publicly available package versions as released to their public registries. It is provided for informational purposes only and reflects the changes between those published versions.
Files changed (81)
  1. checksums.yaml +5 -13
  2. data/Gemfile +1 -2
  3. data/README.markdown +68 -37
  4. data/Rakefile +4 -4
  5. data/bddfire.gemspec +19 -16
  6. data/bin/bddfire +116 -106
  7. data/features/step_definitions/all_steps.rb +1 -4
  8. data/features/support/env.rb +4 -4
  9. data/lib/bddfire/accesibility/axe.rb +40 -0
  10. data/lib/bddfire/assertions/assert.rb +2 -2
  11. data/lib/bddfire/page-objects/HomePage.rb +7 -9
  12. data/lib/bddfire/require.rb +5 -4
  13. data/lib/bddfire/version.rb +1 -1
  14. data/lib/bddfire/web/browser_actions.rb +1 -1
  15. data/lib/bddfire/web/headless_steps.rb +9 -10
  16. data/lib/bddfire/web/web_methods.rb +4 -5
  17. data/lib/bddfire/web/web_steps.rb +3 -3
  18. data/lib/bddfire.rb +1 -3
  19. data/pre-defined-steps/accessibility_steps.md +24 -0
  20. data/pre-defined-steps/capybara_steps.md +1 -1
  21. data/scaffold/accessibility/features/01_validation.feature +6 -0
  22. data/scaffold/accessibility/features/02_javascript.feature +6 -0
  23. data/scaffold/accessibility/features/03_language.feature +9 -0
  24. data/scaffold/accessibility/features/04_page_titles.feature +7 -0
  25. data/scaffold/accessibility/features/05_main_landmark.feature +7 -0
  26. data/scaffold/accessibility/features/06_headings.feature +7 -0
  27. data/scaffold/accessibility/features/07_minimum_resizable_text.feature +8 -0
  28. data/scaffold/accessibility/features/08_tabindex.feature +6 -0
  29. data/scaffold/accessibility/features/09_focusable_controls.feature +9 -0
  30. data/scaffold/accessibility/features/10_color_contrast.feature +10 -0
  31. data/scaffold/accessibility/features/11_image.feature +10 -0
  32. data/scaffold/accessibility/features/12_form.feature +11 -0
  33. data/scaffold/accessibility/features/13_table.feature +10 -0
  34. data/scaffold/accessibility/features/general_accessibility_check.feature +9 -0
  35. data/scaffold/accessibility/features/step_definitions/base.rb +22 -0
  36. data/scaffold/accessibility/features/step_definitions/form.rb +0 -0
  37. data/scaffold/accessibility/features/step_definitions/headings.rb +7 -0
  38. data/scaffold/accessibility/features/step_definitions/image.rb +0 -0
  39. data/scaffold/accessibility/features/step_definitions/javascript.rb +3 -0
  40. data/scaffold/accessibility/features/step_definitions/language.rb +0 -0
  41. data/scaffold/accessibility/features/step_definitions/main_landmark.rb +15 -0
  42. data/scaffold/accessibility/features/step_definitions/minimum_resizable_text.rb +11 -0
  43. data/scaffold/accessibility/features/step_definitions/page_title.rb +10 -0
  44. data/scaffold/accessibility/features/step_definitions/tabindex.rb +3 -0
  45. data/scaffold/accessibility/features/step_definitions/table.rb +0 -0
  46. data/scaffold/accessibility/features/step_definitions/validation.rb +4 -0
  47. data/scaffold/accessibility/features/support/capybara.rb +42 -0
  48. data/scaffold/config_files/.rubocop.yml +5 -5
  49. data/scaffold/config_files/Dockerfile +24 -0
  50. data/scaffold/config_files/Gemfile +9 -5
  51. data/scaffold/config_files/Rakefile +36 -32
  52. data/scaffold/config_files/docker.sh +82 -0
  53. data/scaffold/features/pages/HomePage.rb +10 -10
  54. data/scaffold/features/support/env.rb +128 -130
  55. data/scaffold/features/support/hooks.rb +5 -6
  56. data/scaffold/load/Dockerfile +28 -0
  57. data/scaffold/load/README.md +31 -0
  58. data/scaffold/load/conf/application.conf +7 -0
  59. data/scaffold/load/conf/gatling.conf +157 -0
  60. data/scaffold/load/conf/logback.xml +22 -0
  61. data/scaffold/load/conf/recorder.conf +51 -0
  62. data/scaffold/load/docker-jenkins.sh +77 -0
  63. data/scaffold/load/gatling_jenkins.sh +28 -0
  64. data/scaffold/load/gatling_local.sh +29 -0
  65. data/scaffold/load/user-files/bodies/.keep +0 -0
  66. data/scaffold/load/user-files/data/search.csv +2 -0
  67. data/scaffold/load/user-files/simulations/SampleLoadTEST.scala +22 -0
  68. data/scaffold/rake_tasks/cucumber.rb +2 -3
  69. data/scaffold/rake_tasks/cuke_sniffer.rb +3 -3
  70. data/scaffold/rake_tasks/rspec.rb +3 -4
  71. data/scaffold/rake_tasks/rubocop.rb +4 -5
  72. data/scaffold/rake_tasks/yard.rb +3 -4
  73. metadata +142 -55
  74. data/scaffold/config_files/.ruby-version +0 -1
  75. data/scaffold/config_files/.travis.yml +0 -16
  76. data/scaffold/features/pages/Abstract.rb +0 -14
  77. data/scaffold/features/support/helpers.erb +0 -6
  78. data/scaffold/features/support/responsive.rb +0 -19
  79. data/scaffold/lib/project/version.erb +0 -3
  80. data/scaffold/lib/project.erb +0 -2
  81. data/scaffold/spec/spec_helper.rb +0 -8
data/scaffold/load/conf/gatling.conf
@@ -0,0 +1,157 @@
+ #########################
+ # Gatling Configuration #
+ #########################
+
+ # This file contains all the settings configurable for Gatling with their default values
+
+ gatling {
+ core {
+ #outputDirectoryBaseName = "" # The prefix for each simulation result folder (then suffixed by the report generation timestamp)
+ #runDescription = "" # The description for this simulation run, displayed in each report
+ #encoding = "utf-8" # Encoding to use throughout Gatling for file and string manipulation
+ simulationClass = "BBCSampleTEST" # The FQCN of the simulation to run (when used in conjunction with noReports, the simulation for which assertions will be validated)
+ #mute = false # When set to true, don't ask for simulation name nor run description (currently only used by Gatling SBT plugin)
+
+ extract {
+ regex {
+ #cacheMaxCapacity = 200 # Cache size for the compiled regexes, set to 0 to disable caching
+ }
+ xpath {
+ #cacheMaxCapacity = 200 # Cache size for the compiled XPath queries, set to 0 to disable caching
+ }
+ jsonPath {
+ #cacheMaxCapacity = 200 # Cache size for the compiled jsonPath queries, set to 0 to disable caching
+ #preferJackson = false # When set to true, prefer Jackson over Boon for JSON-related operations
+ jackson {
+ #allowComments = false # Allow comments in JSON files
+ #allowUnquotedFieldNames = false # Allow unquoted JSON fields names
+ #allowSingleQuotes = false # Allow single quoted JSON field names
+ }
+
+ }
+ css {
+ #cacheMaxCapacity = 200 # Cache size for the compiled CSS selectors queries, set to 0 to disable caching
+ }
+ }
+
+ timeOut {
+ #simulation = 8640000 # Absolute timeout, in seconds, of a simulation
+ }
+ directory {
+ #data = user-files/data # Folder where user's data (e.g. files used by Feeders) is located
+ #bodies = user-files/bodies # Folder where bodies are located
+ #simulations = user-files/simulations # Folder where the bundle's simulations are located
+ #reportsOnly = "" # If set, name of report folder to look for in order to generate its report
+ #binaries = "" # If set, name of the folder where compiles classes are located: Defaults to GATLING_HOME/target.
+ #results = results # Name of the folder where all reports folder are located
+ }
+ }
+ charting {
+ #noReports = false # When set to true, don't generate HTML reports
+ #maxPlotPerSeries = 1000 # Number of points per graph in Gatling reports
+ #accuracy = 10 # Accuracy, in milliseconds, of the report's stats
+ indicators {
+ #lowerBound = 800 # Lower bound for the requests' response time to track in the reports and the console summary
+ #higherBound = 1200 # Higher bound for the requests' response time to track in the reports and the console summary
+ #percentile1 = 50 # Value for the 1st percentile to track in the reports, the console summary and GraphiteDataWriter
+ #percentile2 = 75 # Value for the 2nd percentile to track in the reports, the console summary and GraphiteDataWriter
+ #percentile3 = 95 # Value for the 3rd percentile to track in the reports, the console summary and GraphiteDataWriter
+ #percentile4 = 99 # Value for the 4th percentile to track in the reports, the console summary and GraphiteDataWriter
+ }
+ }
+ http {
+ #elFileBodiesCacheMaxCapacity = 200 # Cache size for request body EL templates, set to 0 to disable
+ #rawFileBodiesCacheMaxCapacity = 200 # Cache size for request body Raw templates, set to 0 to disable
+ #fetchedCssCacheMaxCapacity = 200 # Cache size for CSS parsed content, set to 0 to disable
+ #fetchedHtmlCacheMaxCapacity = 200 # Cache size for HTML parsed content, set to 0 to disable
+ #redirectPerUserCacheMaxCapacity = 200 # Per virtual user cache size for permanent redirects, set to 0 to disable
+ #expirePerUserCacheMaxCapacity = 200 # Per virtual user cache size for permanent 'Expire' headers, set to 0 to disable
+ #lastModifiedPerUserCacheMaxCapacity = 200 # Per virtual user cache size for permanent 'Last-Modified' headers, set to 0 to disable
+ #etagPerUserCacheMaxCapacity = 200 # Per virtual user cache size for permanent ETag headers, set to 0 to disable
+ #warmUpUrl = "http://gatling.io" # The URL to use to warm-up the HTTP stack (blank means disabled)
+ #enableGA = true # Very light Google Analytics, please support
+ ssl {
+ trustStore {
+ #type = "" # Type of SSLContext's TrustManagers store
+ #file = "" # Location of SSLContext's TrustManagers store
+ #password = "" # Password for SSLContext's TrustManagers store
+ #algorithm = "" # Algorithm used by SSLContext's TrustManagers store
+ }
+ keyStore {
+ #type = "" # Type of SSLContext's KeyManagers store
+ #file = "" # Location of SSLContext's KeyManagers store
+ #password = "" # Password for SSLContext's KeyManagers store
+ #algorithm = "" # Algorithm used SSLContext's KeyManagers store
+ }
+ }
+ ahc {
+ #allowPoolingConnections = true # Allow pooling HTTP connections (keep-alive header automatically added)
+ #allowPoolingSslConnections = true # Allow pooling HTTPS connections (keep-alive header automatically added)
+ #compressionEnforced = false # Enforce gzip/deflate when Accept-Encoding header is not defined
+ #connectTimeout = 60000 # Timeout when establishing a connection
+ #pooledConnectionIdleTimeout = 60000 # Timeout when a connection stays unused in the pool
+ #readTimeout = 60000 # Timeout when a used connection stays idle
+ #connectionTTL = -1 # Max duration a connection can stay open (-1 means no limit)
+ #ioThreadMultiplier = 2 # Number of Netty worker threads per core
+ #maxConnectionsPerHost = -1 # Max number of connections per host (-1 means no limit)
+ #maxConnections = -1 # Max number of connections (-1 means no limit)
+ #maxRetry = 2 # Number of times that a request should be tried again
+ #requestTimeout = 60000 # Timeout of the requests
+ #useProxyProperties = false # When set to true, supports standard Proxy System properties
+ #webSocketTimeout = 60000 # Timeout when a used websocket connection stays idle
+ #useRelativeURIsWithConnectProxies = true # When set to true, use relative URIs when talking with an SSL proxy or a WebSocket proxy
+ #acceptAnyCertificate = true # When set to true, doesn't validate SSL certificates
+ #httpClientCodecMaxInitialLineLength = 4096 # Maximum length of the initial line of the response (e.g. "HTTP/1.0 200 OK")
+ #httpClientCodecMaxHeaderSize = 8192 # Maximum size, in bytes, of each request's headers
+ #httpClientCodecMaxChunkSize = 8192 # Maximum length of the content or each chunk
+ #keepEncodingHeader = true # Don't drop Encoding response header after decoding
+ #webSocketMaxFrameSize = 10240 # Maximum frame payload size
+ #httpsEnabledProtocols = "TLSv1.2, TLSv1.1, TLSv1" # Comma separated enabled protocols for HTTPS, if empty use the JDK defaults
+ #httpsEnabledCipherSuites = "" # Comma separated enabled cipher suites for HTTPS, if empty use the JDK defaults
+ #sslSessionCacheSize = 20000 # SSLSession cache size (set to 0 to disable)
+ #sslSessionTimeout = 86400 # SSLSession timeout (default is 24, like Hotspot)
+ }
+ }
+ data {
+ #writers = "console, file" # The lists of DataWriters to which Gatling write simulation data (currently supported : "console", "file", "graphite", "jdbc")
+ #reader = file # The DataReader used by the charting engine for reading simulation results
+ console {
+ #light = false # When set to true, displays a light version without detailed request stats
+ }
+ file {
+ #bufferSize = 8192 # FileDataWriter's internal data buffer size, in bytes
+ }
+ leak {
+ #noActivityTimeout = 30 # Period, in seconds, for which Gatling may have no activity before considering a leak may be happening
+ }
+ jdbc {
+ db {
+ #url = "jdbc:mysql://localhost:3306/temp" # The JDBC URL used by the JDBC DataWriter
+ #username = "root" # The database user used by the JDBC DataWriter
+ #password = "123123q" # The password for the specified user
+ }
+ #bufferSize = 20 # The size for each batch of SQL inserts to send to the database
+ create {
+ #createRunRecordTable = "CREATE TABLE IF NOT EXISTS `RunRecords` ( `id` INT NOT NULL AUTO_INCREMENT , `runDate` DATETIME NULL , `simulationId` VARCHAR(45) NULL , `runDescription` VARCHAR(45) NULL , PRIMARY KEY (`id`) )"
+ #createRequestRecordTable = "CREATE TABLE IF NOT EXISTS `RequestRecords` (`id` int(11) NOT NULL AUTO_INCREMENT, `runId` int DEFAULT NULL, `scenario` varchar(45) DEFAULT NULL, `userId` VARCHAR(30) NULL, `name` varchar(50) DEFAULT NULL, `requestStartDate` bigint DEFAULT NULL, `requestEndDate` bigint DEFAULT NULL, `responseStartDate` bigint DEFAULT NULL, `responseEndDate` bigint DEFAULT NULL, `status` varchar(2) DEFAULT NULL, `message` varchar(4500) DEFAULT NULL, `responseTime` bigint DEFAULT NULL, PRIMARY KEY (`id`) )"
+ #createScenarioRecordTable = "CREATE TABLE IF NOT EXISTS `ScenarioRecords` (`id` int(11) NOT NULL AUTO_INCREMENT, `runId` int DEFAULT NULL, `scenarioName` varchar(45) DEFAULT NULL, `userId` VARCHAR(30) NULL, `event` varchar(50) DEFAULT NULL, `startDate` bigint DEFAULT NULL, `endDate` bigint DEFAULT NULL, PRIMARY KEY (`id`) )"
+ #createGroupRecordTable = "CREATE TABLE IF NOT EXISTS `GroupRecords` (`id` int(11) NOT NULL AUTO_INCREMENT, `runId` int DEFAULT NULL, `scenarioName` varchar(45) DEFAULT NULL, `userId` VARCHAR(30) NULL, `entryDate` bigint DEFAULT NULL, `exitDate` bigint DEFAULT NULL, `status` varchar(2) DEFAULT NULL, PRIMARY KEY (`id`) )"
+ }
+ insert {
+ #insertRunRecord = "INSERT INTO RunRecords (runDate, simulationId, runDescription) VALUES (?,?,?)"
+ #insertRequestRecord = "INSERT INTO RequestRecords (runId, scenario, userId, name, requestStartDate, requestEndDate, responseStartDate, responseEndDate, status, message, responseTime) VALUES (?,?,?,?,?,?,?,?,?,?,?)"
+ #insertScenarioRecord = "INSERT INTO ScenarioRecords (runId, scenarioName, userId, event, startDate, endDate) VALUES (?,?,?,?,?,?)"
+ #insertGroupRecord = "INSERT INTO GroupRecords (runId, scenarioName, userId, entryDate, exitDate, status) VALUES (?,?,?,?,?,?)"
+ }
+ }
+ graphite {
+ #light = false # only send the all* stats
+ #host = "localhost" # The host where the Carbon server is located
+ #port = 2003 # The port to which the Carbon server listens to
+ #protocol = "tcp" # The protocol used to send data to Carbon (currently supported : "tcp", "udp")
+ #rootPathPrefix = "gatling" # The common prefix of all metrics sent to Graphite
+ #bufferSize = 8192 # GraphiteDataWriter's internal data buffer size, in bytes
+ #writeInterval = 1 # GraphiteDataWriter's write interval, in seconds
+ }
+ }
+ }
data/scaffold/load/conf/logback.xml
@@ -0,0 +1,22 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <configuration>
+
+ <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%d{HH:mm:ss.SSS} [%-5level] %logger{15} - %msg%n%rEx</pattern>
+ <immediateFlush>false</immediateFlush>
+ </encoder>
+ </appender>
+
+ <!-- Uncomment for logging ALL HTTP request and responses -->
+ <!-- <logger name="io.gatling.http.ahc" level="TRACE" /> -->
+ <!-- <logger name="io.gatling.http.response" level="TRACE" /> -->
+ <!-- Uncomment for logging ONLY FAILED HTTP request and responses -->
+ <!-- <logger name="io.gatling.http.ahc" level="DEBUG" /> -->
+ <!-- <logger name="io.gatling.http.response" level="DEBUG" /> -->
+
+ <root level="WARN">
+ <appender-ref ref="CONSOLE" />
+ </root>
+
+ </configuration>
data/scaffold/load/conf/recorder.conf
@@ -0,0 +1,51 @@
+ recorder {
+ core {
+ #mode = "Proxy"
+ #encoding = "utf-8" # The encoding used for reading/writing request bodies and the generated simulation
+ #outputFolder = "" # The folder where generated simulation will we written
+ #package = "" # The package's name of the generated simulation
+ #className = "RecordedSimulation" # The name of the generated Simulation class
+ #thresholdForPauseCreation = 100 # The minimum time, in milliseconds, that must pass between requests to trigger a pause creation
+ #saveConfig = false # When set to true, the configuration from the Recorder GUI overwrites this configuration
+ }
+ filters {
+ #filterStrategy = "Disabled" # The selected filter resources filter strategy (currently supported : "Disabled", "BlackList", "WhiteList")
+ #whitelist = [] # The list of ressources patterns that are part of the Recorder's whitelist
+ #blacklist = [] # The list of ressources patterns that are part of the Recorder's blacklist
+ }
+ http {
+ #automaticReferer = true # When set to false, write the referer + enable 'disableAutoReferer' in the generated simulation
+ #followRedirect = true # When set to false, write redirect requests + enable 'disableFollowRedirect' in the generated simulation
+ #removeCacheHeaders = true # When set to true, removes from the generated requests headers leading to request caching
+ #inferHtmlResources = true # When set to true, add inferred resources + set 'inferHtmlResources' with the configured blacklist/whitelist in the generated simulation
+ #checkResponseBodies = false # When set to true, save response bodies as files and add raw checks in the generated simulation
+ }
+ proxy {
+ #port = 8000 # Local port used by Gatling's Proxy for HTTP/HTTPS
+ https {
+ #mode = "SelfSignedCertificate" # The selected "HTTPS mode" (currently supported : "SelfSignedCertificate", "ProvidedKeyStore", "GatlingCertificateAuthority", "CustomCertificateAuthority")
+ keyStore {
+ #path = "" # The path of the custom key store
+ #password = "" # The password for this key store
+ #type = "JKS" # The type of the key store (currently supported: "JKS")
+ }
+ certificateAuthority {
+ #certificatePath = "" # The path of the custom certificate
+ #privateKeyPath = "" # The certificate's private key path
+ }
+ }
+ outgoing {
+ #host = "" # The outgoing proxy's hostname
+ #username = "" # The username to use to connect to the outgoing proxy
+ #password = "" # The password corresponding to the user to use to connect to the outgoing proxy
+ #port = 0 # The HTTP port to use to connect to the outgoing proxy
+ #sslPort = 0 # If set, The HTTPS port to use to connect to the outgoing proxy
+ }
+ }
+ netty {
+ #maxInitialLineLength = 10000 # Maximum length of the initial line of the response (e.g. "HTTP/1.0 200 OK")
+ #maxHeaderSize = 20000 # Maximum size, in bytes, of each request's headers
+ #maxChunkSize = 8192 # Maximum length of the content or each chunk
+ #maxContentLength = 100000000 # Maximum length of the aggregated content of each response
+ }
+ }
data/scaffold/load/docker-jenkins.sh
@@ -0,0 +1,77 @@
+ #!/bin/bash
+ CLASS=$1
+ CONTAINER_NAME="gatling-jenkins-docker"
+ IMAGE_NAME="gatling-jenkins-docker"
+
+
+ function check_image_exist {
+ echo -e "\n*** Checking if docker image exists for the web-scraper... ***\n"
+
+ if docker images | grep -w ${IMAGE_NAME}
+ then
+ echo -e "\n*** Image already exists. We can run container... ***\n"
+
+ else
+ build_image
+ fi
+ }
+
+ function delete_old_reports {
+ rm -rf $WORKSPACE/results/
+ docker exec $CONTAINER_NAME rm -rf /opt/gatling/results/*
+ }
+
+ function build_image {
+
+ echo -e "\n*** Building the image ***\n"
+ docker build -t ${IMAGE_NAME} .
+ echo -e "\n*** Finished building the image ***\n"
+
+ }
+
+ function check_container_exist {
+
+ echo -e "\n *** Deleting old unused containers"
+
+ docker rm $(docker ps -a | grep 'gatling-jenkins-docker' | awk '{print $3}')
+
+ echo -e "\n*** Checking if the container exists ***\n"
+
+ if docker ps -a | grep ${CONTAINER_NAME}
+ then
+ echo -e "\n*** Container already exists ***\n"
+ docker start ${CONTAINER_NAME}
+ else
+ echo -e "\n*** Running the container ***\n"
+ start_container_with_Gatling
+ fi
+ }
+
+ function start_container_with_Gatling {
+ docker run -t --rm -v $WORKSPACE/conf:/opt/gatling/conf \
+ -v $WORKSPACE/user-files:/opt/gatling/user-files \
+ -v $WORKSPACE/results:/opt/gatling/results \
+ --name $CONTAINER_NAME $IMAGE_NAME
+ }
+
+ function stop_container {
+ docker stop ${CONTAINER_NAME} /opt/gatling/bin/gatling.sh
+ }
+
+ function run_gatling_test {
+ docker exec ${CONTAINER_NAME} /opt/gatling/bin/gatling.sh -s $CLASS
+ }
+
+ function copy_gatling_reports_to_workspace {
+ docker cp ${CONTAINER_NAME}:/opt/gatling/results $WORKSPACE/
+ }
+
+
+
+ check_image_exist
+ check_container_exist
+ delete_old_reports
+ start_container_with_Gatling
+ run_gatling_test
+ copy_gatling_reports_to_workspace
+ stop_container
data/scaffold/load/gatling_jenkins.sh
@@ -0,0 +1,28 @@
+ #!/bin/bash
+ cd ${WORKSPACE}
+
+ # Remove old Gatling reports and version Download New
+
+ rm -rf ${WORKSPACE}/gatling-charts-highcharts-bundle-$GATLING_VERSION
+
+
+ curl -sf -o ${WORKSPACE}/gatling-$GATLING_VERSION.zip \
+ -L https://repo1.maven.org/maven2/io/gatling/highcharts/gatling-charts-highcharts-bundle/$GATLING_VERSION/gatling-charts-highcharts-bundle-$GATLING_VERSION-bundle.zip
+ ls
+ unzip ${WORKSPACE}/gatling-$GATLING_VERSION.zip
+
+ chmod +x ${WORKSPACE}/gatling-charts-highcharts-bundle-$GATLING_VERSION/bin/gatling.sh
+ # Set GATLING_HOME
+
+ export GATLING_HOME=${WORKSPACE}/gatling-charts-highcharts-bundle-$GATLING_VERSION
+
+ # Remove default simulations & Config and add our own
+
+ rm -rf ${WORKSPACE}/gatling-charts-highcharts-bundle-$GATLING_VERSION/user-files/simulations/
+ rm -rf ${WORKSPACE}/gatling-charts-highcharts-bundle-$GATLING_VERSION/conf
+ cp ${WORKSPACE}/user-files/ ${WORKSPACE}/gatling-charts-highcharts-bundle-$GATLING_VERSION/user-files/
+ cp ${WORKSPACE}/conf ${WORKSPACE}/gatling-charts-highcharts-bundle-$GATLING_VERSION/conf/
+
+ # RUN Gatling TEST WITH CLASS CLASS/TEST SCENARIO SPECIFIED
+
+ sh ${WORKSPACE}/gatling-charts-highcharts-bundle-$GATLING_VERSION/bin/gatling.sh -sf ${WORKSPACE}/gatling-charts-highcharts-bundle-$GATLING_VERSION/user-files/simulations/ -s SampleLoadTEST -rf ${WORKSPACE}/gatling-charts-highcharts-bundle-$GATLING_VERSION/results/
data/scaffold/load/gatling_local.sh
@@ -0,0 +1,29 @@
+ #!/bin/bash
+ GATLING_VERSION=2.1.7
+
+ # Remove old Gatling reports and version Download New
+
+ rm -rf gatling-charts-highcharts-bundle-${GATLING_VERSION}/
+ rm -rf gatling-${GATLING_VERSION}.zip/
+
+
+ curl -sf -o gatling-${GATLING_VERSION}.zip \
+ -L https://repo1.maven.org/maven2/io/gatling/highcharts/gatling-charts-highcharts-bundle/${GATLING_VERSION}/gatling-charts-highcharts-bundle-${GATLING_VERSION}-bundle.zip
+ ls
+ unzip gatling-${GATLING_VERSION}.zip
+
+ chmod +x gatling-charts-highcharts-bundle-${GATLING_VERSION}/bin/gatling.sh
+ # Set GATLING_HOME
+
+ export GATLING_HOME=gatling-charts-highcharts-bundle-${GATLING_VERSION}
+
+ # Remove default simulations and Config and add our own
+
+ rm -rf gatling-charts-highcharts-bundle-${GATLING_VERSION}/user-files/
+ rm -rf gatling-charts-highcharts-bundle-${GATLING_VERSION}/conf
+ cp -rf user-files/ gatling-charts-highcharts-bundle-${GATLING_VERSION}/user-files/
+ cp -rf config/ gatling-charts-highcharts-bundle-${GATLING_VERSION}/config
+
+ # RUN Gatling TEST WITH CLASS CLASS/TEST SCENARIO SPECIFIED
+
+ sh gatling-charts-highcharts-bundle-${GATLING_VERSION}/bin/gatling.sh -sf gatling-charts-highcharts-bundle-${GATLING_VERSION}/user-files/simulations/ -s SampleLoadTest -rf gatling-charts-highcharts-bundle-${GATLING_VERSION}/results/
data/scaffold/load/user-files/bodies/.keep
File without changes
data/scaffold/load/user-files/data/search.csv
@@ -0,0 +1,2 @@
+ radioStation, programmesName
+ Radio4, Archers
data/scaffold/load/user-files/simulations/SampleLoadTEST.scala
@@ -0,0 +1,22 @@
+
+ import scala.concurrent.duration._
+
+ import io.gatling.core.Predef._
+ import io.gatling.http.Predef._
+ import io.gatling.jdbc.Predef._
+
+ class SampleLoadTEST extends Simulation {
+
+ val httpConf = http
+ .baseURL("http://www.aol.co.uk")
+
+ val scn = scenario("SampleLoadTEST")
+ .exec(http("Sample Load Test")
+ .get("/"))
+ .pause(2)
+
+ setUp(
+ scn.inject(atOnceUsers(1))
+ ).protocols(httpConf).assertions(global.responseTime.max.lessThan(1000))
+
+ }
data/scaffold/rake_tasks/cucumber.rb
@@ -1,8 +1,7 @@
  require 'cucumber/rake/task'
  Cucumber::Rake::Task.new(:features) do |features|
- features.cucumber_opts = "features -p selenium --format progress"
+ features.cucumber_opts = 'features -p selenium --format progress'
  end
  Cucumber::Rake::Task.new(:features_ci) do |task|
- task.cucumber_opts = ["-p poltergeist -f pretty -f junit --out target/ -f html --out target/report.html"]
+ task.cucumber_opts = ['-p poltergeist -f pretty -f junit --out target/ -f html --out target/report.html']
  end
-
data/scaffold/rake_tasks/cuke_sniffer.rb
@@ -1,4 +1,4 @@
- task :cuke_sniffer do
- sh 'cd features'
- sh 'bundle exec cuke_sniffer'
+ task :cuke_sniffer do
+ sh 'cd features'
+ sh 'bundle exec cuke_sniffer'
  end
data/scaffold/rake_tasks/rspec.rb
@@ -1,12 +1,11 @@
  require 'rspec/core/rake_task'
- desc "Run specs"
+ desc 'Run specs'
  RSpec::Core::RakeTask.new(:spec) do |t|
  t.rspec_opts = %w(--color)
  end
  namespace :spec do
- desc "Run specs with output in documentation format"
+ desc 'Run specs with output in documentation format'
  RSpec::Core::RakeTask.new(:doc) do |t|
- t.rspec_opts = ["--color", "--format d"]
+ t.rspec_opts = ['--color', '--format d']
  end
  end
-
data/scaffold/rake_tasks/rubocop.rb
@@ -1,12 +1,11 @@
  require 'rubocop/rake_task'

- #desc "Run Rubocop"
+ # desc "Run Rubocop"

- #Rubocop::RakeTask.new(:rubocop_rake) do |task|
+ # Rubocop::RakeTask.new(:rubocop_rake) do |task|
  # task.patterns = ['features/**/*.rb']
- #end
+ # end

- task :rubocop do
+ task :rubocop do
  sh 'bundle exec rubocop'
  end
-
data/scaffold/rake_tasks/yard.rb
@@ -1,11 +1,10 @@
  require 'yard'

  YARD::Rake::YardocTask.new do |t|
- t.files = ['features/**/*.feature', 'features/**/*.rb']
- t.options = ['--any', '--extra', '--opts'] # optional
+ t.files = ['features/**/*.feature', 'features/**/*.rb']
+ t.options = ['--any', '--extra', '--opts'] # optional
  end

- task :yard_bundle do
+ task :yard_bundle do
  sh "bundle exec yardoc 'example/**/*.rb' 'example/**/*.feature'"
  end
-