diggit 1.0.0
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE +165 -0
- data/README.md +45 -0
- data/bin/dgit +6 -0
- data/includes/addons/db.rb +22 -0
- data/includes/addons/output.rb +29 -0
- data/includes/addons/test.rb +13 -0
- data/includes/analyses/authors.rb +12 -0
- data/includes/analyses/cloc.rb +22 -0
- data/includes/analyses/diff_stats.rb +39 -0
- data/includes/analyses/metadata.rb +40 -0
- data/includes/analyses/pom.rb +16 -0
- data/includes/analyses/test.rb +13 -0
- data/includes/joins/diff_size.rb +18 -0
- data/includes/joins/test.rb +13 -0
- data/lib/diggit_cli.rb +309 -0
- data/lib/diggit_core.rb +273 -0
- data/spec/diggit_spec.rb +129 -0
- data/spec/spec_helper.rb +31 -0
- metadata +120 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA1:
  metadata.gz: a4b7487548c7f0524510efe878c636c54b58c304
  data.tar.gz: 481b02f7281fa616641b4106a36baa3f3a4b6b6c
SHA512:
  metadata.gz: 1a6c92ce2456aff993a743a14a8f71752fdf0f27fae13dafb2e7f58ec1f53795c53381f883b0ce3478380d10799888ef5de5c3908a856bf401a9a27e1fb18280
  data.tar.gz: bc12d9e29df011d9675fae2392aeee0282ac9cd3571a1e7f216f3545e29a74c55dbc846f9ddc7caf0afb41802333b6d4c632f93847eee3c096f900b6beeaf29f
data/LICENSE
ADDED
@@ -0,0 +1,165 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.

This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below.

0. Additional Definitions.

As used herein, "this License" refers to version 3 of the GNU Lesser General Public License, and the "GNU GPL" refers to version 3 of the GNU General Public License.

"The Library" refers to a covered work governed by this License, other than an Application or a Combined Work as defined below.

An "Application" is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library.

A "Combined Work" is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the "Linked Version".

The "Minimal Corresponding Source" for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version.

The "Corresponding Application Code" for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work.

1. Exception to Section 3 of the GNU GPL.

You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL.

2. Conveying Modified Versions.

If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version:

a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or

b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy.

3. Object Code Incorporating Material from Library Header Files.

The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following:

a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License.

b) Accompany the object code with a copy of the GNU GPL and this license document.

4. Combined Works.

You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following:

a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License.

b) Accompany the Combined Work with a copy of the GNU GPL and this license document.

c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document.

d) Do one of the following:

0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.

1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version.

e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.)

5. Combined Libraries.

You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following:

a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License.

b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work.

6. Revised Versions of the GNU Lesser General Public License.

The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.

Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation.

If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library.
data/README.md
ADDED
@@ -0,0 +1,45 @@
# Diggit

A Ruby tool to analyze Git repositories.

# Installation

Clone diggit using the following command.

```
git clone https://github.com/jrfaller/diggit.git
```

The `dgit` executable is in the bin folder. Don't hesitate to add it to your path (or create a link to it) to be able to launch it from any folder.

# Usage

## Configuration

The diggit tool is designed to help you analyze software repositories. First, create a new folder and launch the `dgit init` command in it. The folder then becomes a diggit folder in which you can configure repositories and analyses.

### Setting up the repositories

You can add repositories to be analyzed with the following command: `dgit sources add https://github.com/jrfaller/diggit.git`.

### Using addons

Addons add features to the diggit tool, for instance the capability of writing to a MongoDB database. To enable addons for your current diggit folder, use the following command: `dgit addons add TestAddon`.

### Setting up analyses

An analysis is applied to each repository. You can configure the analyses to be performed with the following command: `dgit analyses add TestAnalysis`. Analyses are performed in the order they have been added.

### Setting up joins

A join is performed after all analyses of all repositories have been performed. You can configure the joins to be performed with the following command: `dgit joins add TestJoin`. Joins are performed in the order they have been added.

## Running analyses

Once diggit is configured, you can perform the analyses. First, clone the repositories with `dgit perform clones`. Then launch the analyses with `dgit perform analyses`. Finally, execute the joins with `dgit perform joins`.

At any time, you can check the status of your diggit folder with `dgit status`. If you want more information on the status of a given repository, use the `dgit sources info https://github.com/jrfaller/diggit.git` command.

## Cleaning up

If something goes wrong, you can always delete the results of the joins with `dgit clean joins` and of the analyses with `dgit clean analyses`.
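The TestAddon, TestAnalysis and TestJoin classes mentioned in the README are plugins that follow the base classes defined in lib/diggit_core.rb, shown later in this diff. As an illustration only — the class name and printed output are made up, not part of the gem — a minimal analysis plugin could look like this:

```
# encoding: utf-8

# Hypothetical analysis plugin: counts the commits of a repository.
# @repo is the Rugged repository and @source the repository URL, both
# set by the Diggit::Analysis constructor. Once the file is placed with
# the other bundled analyses under includes/analyses, enable it with
# `dgit analyses add CommitCountAnalysis`.
class CommitCountAnalysis < Diggit::Analysis
  def run
    walker = Rugged::Walker.new(@repo)
    walker.push(@repo.last_commit)
    puts "#{@source}: #{walker.count} commits"
  end

  def clean
    # Nothing to clean: this analysis keeps no persistent output.
  end
end
```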
data/includes/addons/db.rb
ADDED
@@ -0,0 +1,22 @@
# encoding: utf-8
require 'mongo'

class Db < Diggit::Addon

  DEFAULT_DB = 'diggit'

  attr_reader :db

  def initialize(*args)
    super
    client = Mongo::MongoClient.new
    database = DEFAULT_DB
    database = @options[:mongo][:database] if @options.has_key?(:mongo) && @options[:mongo].has_key?(:database)
    @db = client[database]
  end

  def name
    :db
  end

end
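The Db addon above targets the legacy mongo 1.x driver (Mongo::MongoClient). Analyses and joins reach it through the @addons hash under the :db key returned by its name method; a minimal sketch of that pattern, with a made-up collection name, is:

```
# Inside a Diggit::Analysis subclass (hypothetical collection name).
def run
  results = @addons[:db].db['my_results']
  results.insert(source: @source, analyzed_at: Time.now)
end
```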
data/includes/addons/output.rb
ADDED
@@ -0,0 +1,29 @@
# encoding: utf-8

class Output < Diggit::Addon

  attr_reader :out, :tmp

  DEFAULT_OUT = 'out'
  DEFAULT_TMP = 'tmp'

  def initialize(*args)
    super

    out = DEFAULT_OUT
    out = @options[:output][:out] if @options.has_key?(:output) && @options[:output].has_key?(:out)
    tmp = DEFAULT_TMP
    tmp = @options[:output][:tmp] if @options.has_key?(:output) && @options[:output].has_key?(:tmp)

    @out = File.absolute_path(out)
    @tmp = File.absolute_path(tmp)

    FileUtils.mkdir_p(@out) unless File.exist?(@out)
    FileUtils.mkdir_p(@tmp) unless File.exist?(@tmp)
  end

  def name
    :output
  end

end
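The Output addon plays the same role for the file system: it exposes absolute out and tmp directories that plugins can write to. For example, an analysis could append a line to a report file in the shared output directory (the file name here is chosen purely for illustration):

```
# Inside a Diggit::Analysis subclass (hypothetical report file name).
def run
  report = File.join(@addons[:output].out, 'report.txt')
  File.open(report, 'a') { |f| f.puts("analyzed #{@source}") }
end
```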
data/includes/analyses/authors.rb
ADDED
@@ -0,0 +1,12 @@
# encoding: utf-8

class AuthorAnalysis < Diggit::Analysis

  def run
    walker = Rugged::Walker.new(@repo)
    walker.sorting(Rugged::SORT_DATE)
    walker.push(@repo.last_commit)
    authors = walker.collect{ |c| c.author[:name] }.uniq
    puts "Authors : #{authors}"
  end
end
data/includes/analyses/cloc.rb
ADDED
@@ -0,0 +1,22 @@
# encoding: utf-8

require 'yaml'

class ClocAnalysis < Diggit::Analysis

  def run
    cloc = `cloc . --progress-rate=0 --quiet --yaml`
    unless cloc.empty?
      yaml = YAML.load(cloc.lines[2..-1].join)
      yaml.delete('header')
      output = { source: @source, cloc: yaml }
      col = @addons[:db].db['cloc']
      col.insert(output)
    end
  end

  def clean(source)
    @addons[:db].db['cloc'].remove({source: source})
  end

end
data/includes/analyses/diff_stats.rb
ADDED
@@ -0,0 +1,39 @@
# encoding: utf-8

class DiffStats < Diggit::Analysis

  ACCEPTED_EXTENSIONS = [".java", ".c", ".h", ".js", ".javascrip"]

  def run
    col = @addons[:db].db['diffstats']

    walker = Rugged::Walker.new(@repo)
    ref = @repo.references["refs/heads/master"]
    walker.push(ref.target_id)
    walker.each do |commit|
      parent1 = commit.parents[0]
      if parent1
        diff1 = parent1.diff(commit)
        diff1.each_delta do |delta|
          old_path = delta.old_file[:path]
          new_path = delta.new_file[:path]
          old_ext = File.extname(old_path).downcase
          new_ext = File.extname(new_path).downcase
          if delta.status == :modified && old_ext.eql?(new_ext) && ACCEPTED_EXTENSIONS.include?(old_ext)
            sha_old = delta.old_file[:oid]
            sha_new = delta.new_file[:oid]
            patch = @repo.lookup(sha_new).diff(@repo.lookup(sha_old))
            changes = patch.changes
            edit = {source: @source, old_path: old_path, new_path: new_path, old_commit: commit.oid, new_commit: parent1.oid, changes: changes}
            col.insert(edit)
          end
        end
      end
    end
  end

  def clean
    @addons[:db].db['diffstats'].remove({source: @source})
  end

end
data/includes/analyses/metadata.rb
ADDED
@@ -0,0 +1,40 @@
# encoding: utf-8

class MetadataAnalysis < Diggit::Analysis

  def run
    # Importing tags
    tags = @addons[:db].db['tags']
    @repo.tags.each do |t|
      tag = { source: @source, name: t.name, target: t.target.oid }
      tags.insert(tag)
    end

    # Importing branches
    branches = @addons[:db].db['branches']
    @repo.branches.each do |b|
      branch = { source: @source, name: b.name, target: b.target.oid }
      branches.insert(branch)
    end

    # Importing commits
    commits = @addons[:db].db['commits']
    walker = Rugged::Walker.new(@repo)
    walker.sorting(Rugged::SORT_DATE)
    walker.push(@repo.last_commit)
    walker.each do |c|
      commit = {
        source: @source, oid: c.oid, message: c.message, author: c.author,
        committer: c.committer, parent_ids: c.parent_ids, time: c.time
      }
      commits.insert(commit)
    end
  end

  def clean
    @addons[:db].db['tags'].remove({source: @source})
    @addons[:db].db['branches'].remove({source: @source})
    @addons[:db].db['commits'].remove({source: @source})
  end

end
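MetadataAnalysis mirrors tags, branches and commits into MongoDB collections keyed by the source URL, so the data can be queried outside of diggit. A sketch of reading the imported commits back with the same legacy 1.x driver (the database name 'diggit' is the Db addon default, and the URL is only an example):

```
require 'mongo'

# Read back the commits imported by MetadataAnalysis for one repository.
db = Mongo::MongoClient.new['diggit']
db['commits'].find('source' => 'https://github.com/jrfaller/diggit.git').each do |c|
  puts "#{c['oid']}: #{c['message']}"
end
```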
data/includes/analyses/pom.rb
ADDED
@@ -0,0 +1,16 @@
# encoding: utf-8

class PomAnalysis < Diggit::Analysis

  def run
    pom_files = Dir['**/pom.xml']
    pom_files.each do |file|
      puts file
      git = `git --no-pager log --pretty=%H%n%an --name-status #{file}`.lines
      history = []
      git.each_slice(4) { |slice| history << [slice[0].strip, slice[1].strip, slice[3].split("\t")[0].strip] }
      puts history.inspect
    end
  end

end
data/includes/joins/diff_size.rb
ADDED
@@ -0,0 +1,18 @@
# encoding: utf-8

class DiffSize < Diggit::Join

  ACCEPTED_EXTENSIONS = [".java", ".c", ".h", ".js", ".javascrip"]

  def run
    col = @addons[:db].db['diffstats']
    @sources.each do |source|
      diffs = col.count({:query => {"source" => source}})
      puts "#{source}: #{diffs}"
    end
  end

  def clean
  end

end
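DiffSize illustrates the join side of the plugin API: a join receives the hashes of every successfully analyzed source in @sources and typically aggregates what the analyses stored. A minimal custom join, purely for illustration (the class name is made up), could be:

```
# Hypothetical join: list the analyzed sources it received. Each entry
# of @sources is a hash with :url, :folder and :log keys, as built by
# Diggit::Sources#hash in lib/diggit_core.rb.
class SourceLister < Diggit::Join
  def run
    @sources.each { |source| puts source[:url] }
  end

  def clean
    # Nothing persistent to remove.
  end
end
```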
data/lib/diggit_cli.rb
ADDED
@@ -0,0 +1,309 @@
require 'thor'
require 'fileutils'

require_relative 'diggit_core'

module Diggit

  module Utils

    def diggit
      @diggit = Diggit.new if @diggit.nil?
      return @diggit
    end

    def dump_error(e)
      { name: e.class.name, message: e.to_s, backtrace: e.backtrace }
    end

    def class_exist?(class_name)
      obj = Object::const_get(class_name)
      return obj.is_a?(Class)
    rescue NameError
      return false
    end

    def source_color(source_hash)
      if source_hash[:log][:error].empty?
        return :blue
      else
        return :red
      end
    end

    def status_color(status)
      if status == DONE
        return :green
      else
        return :red
      end
    end

  end

  module Cli

    DONE = '[done]'
    WARNING = '[warning]'
    ERROR = '[error]'
    INFO = '[info]'

    class SourcesCli < Thor
      include Thor::Actions
      include Utils

      desc 'list', "Display the list of sources."
      def list
        idx = 1
        diggit.sources.hashes.each do |s|
          say_status("[#{s[:log][:state]}]", "#{idx}: #{s[:url]}", source_color(s))
          idx += 1
        end
        errors = diggit.sources.errors.size
        status = (errors == 0 && DONE) || ERROR
        say_status(status, "listed #{diggit.sources.size} sources including #{errors} errors", status_color(status))
      end

      desc 'info [SOURCE_DEF]', "Display information on the provided source definition (either source URL or id)."
      def info(source_def)
        s = diggit.sources.get(source_def)
        say_status("[#{s[:log][:state]}]", "#{s[:url]}", source_color(s))
        say_status('[folder]', "#{s[:folder]}", :blue)
        unless s[:log][:error].empty?
          say_status(ERROR, "#{s[:log][:error][:name]}", :red)
          say_status('[message]', "#{s[:log][:error][:message]}", :red)
          say_status('[backtrace]', "", :red)
          say(s[:log][:error][:backtrace].join("\n"))
        end
      end

      desc "errors", "Display information on all sources that have encountered an error."
      def errors
        diggit.sources.get_all(nil, {error: true}).each{ |s| invoke :info, [s[:url]] }
      end

      desc 'import [FILE]', "Import a list of sources from a file (one URL per line)."
      def import(urls_file)
        IO.readlines(urls_file).each{ |line| diggit.sources.add(line.strip) }
      end

      desc "add [URL*]", "Add the provided urls to the list of sources."
      def add(*urls)
        urls.each{ |u| diggit.sources.add(u) }
      end

      desc "rem [SOURCE_DEF*]", "Remove the sources corresponding to the provided source definitions (id or URL) from the list of sources."
      def rem(*sources_defs)
        sources_defs.each { |s| diggit.sources.rem(s) }
      end
    end

    class AddonsCli < Thor
      include Thor::Actions
      include Utils

      desc "add [ADDON*]", "Add the provided addons to the list of active addons."
      def add(*addons)
        addons.each do |a|
          if class_exist?(a)
            diggit.config.add_addon(a)
          else
            say_status(ERROR, "addon #{a} not found", :red)
          end
        end
      end

      desc "rem [ADDON*]", "Remove the provided addons from the list of active addons."
      def rem(*addons)
        addons.each{ |a| diggit.config.rem_addon(a) }
      end

    end

    class JoinsCli < Thor
      include Thor::Actions
      include Utils

      desc "add [JOIN*]", "Add the provided joins to the list of active joins."
      def add(*joins)
        joins.each do |j|
          if class_exist?(j)
            diggit.config.add_join(j)
          else
            say_status(ERROR, "join #{j} not found", :red)
          end
        end
      end

      desc "rem [JOIN*]", "Remove the provided joins from the list of active joins."
      def rem(*joins)
        joins.each{ |j| diggit.config.rem_join(j) }
      end

    end

    class AnalysesCli < Thor
      include Thor::Actions
      include Utils

      desc "add [ANALYSIS*]", "Add the provided analyses to the list of active analyses."
      def add(*analyses)
        analyses.each do |a|
          if class_exist?(a)
            diggit.config.add_analysis(a)
          else
            say_status(ERROR, "analysis #{a} not found", :red)
          end
        end
      end

      desc "rem [ANALYSIS*]", "Remove the provided analyses from the list of active analyses."
      def rem(*analyses)
        analyses.each{ |a| diggit.config.rem_analysis(a) }
      end

    end

    class PerformCli < Thor
      include Thor::Actions
      include Utils

      desc "clones [SOURCE_DEFS*]", "Clone the sources corresponding to the provided source definitions (id or URL). Clone all sources if no source definitions are provided."
      def clones(*source_defs)
        diggit.sources.get_all(source_defs, {state: :new}).each do |s|
          begin
            Rugged::Repository::clone_at(s[:url], s[:folder])
          rescue => e
            s[:log][:error] = dump_error(e)
            say_status(ERROR, "error cloning #{s[:url]}", :red)
          else
            s[:log][:state] = :cloned
            s[:log][:error] = {}
            say_status(DONE, "#{s[:url]} cloned", :blue)
          ensure
            diggit.sources.update(s)
          end
        end
      end

      desc "analyses [SOURCE_DEFS*]", "Perform the configured analyses on the sources corresponding to the provided source definitions (id or URL). Analyze all sources if no source definitions are provided."
      def analyses(*source_defs)
        addons = diggit.config.load_addons
        diggit.sources.get_all(source_defs, {state: :cloned}).each do |s|
          FileUtils.cd(s[:folder])
          globs = {}
          performed_analyses = []
          begin
            repo = Rugged::Repository.new('.')
            diggit.config.load_analyses(s[:url], repo, addons, globs).each do |a|
              performed_analyses << a.class.to_s
              a.run
            end
          rescue => e
            s[:log][:error] = dump_error(e)
            s[:log][:analyses] = performed_analyses[1..-2]
            say_status(ERROR, "error performing #{performed_analyses.last} on #{s[:url]}", :red)
          else
            s[:log][:analyses] = performed_analyses
            s[:log][:state] = :finished
            s[:log][:error] = {}
            say_status(DONE, "source #{s[:url]} analyzed", :blue)
          ensure
            FileUtils.cd(diggit.root)
            diggit.sources.update(s)
          end
        end
      end

      desc "joins", "Perform the configured joins."
      def joins
        addons = diggit.config.load_addons
        globs = {}
        diggit.config.load_joins(diggit.sources.get_all([], {state: :finished, error: false}), addons, globs).each{ |j| j.run }
        say_status(DONE, "joins performed", :blue)
      end

    end

    class CleanCli < Thor
      include Thor::Actions
      include Utils

      desc "analyses", "Clean the configured analyses on the provided source definitions (id or URL). Clean all sources if no source definitions are provided."
      def analyses(*source_defs)
        addons = diggit.config.load_addons
        diggit.sources.get_all(source_defs, {state: :finished}).each do |s|
          globs = {}
          diggit.config.load_analyses(s[:url], nil, addons, globs).each{ |a| a.clean }
          s[:log][:state] = :cloned
          s[:log][:analyses] = []
          s[:log][:error] = {}
          diggit.sources.update(s)
          say_status(DONE, "cleaned analyses on #{s[:url]}", :blue)
        end
      end

      desc "joins", "Clean the configured joins."
      def joins
        addons = diggit.config.load_addons
        globs = {}
        diggit.config.load_joins(diggit.sources.get_all([], {state: :finished, error: false}), addons, globs).each{ |j| j.clean }
      end

    end

    class DiggitCli < Thor
      include Thor::Actions
      include Utils

      def initialize(*args)
        super
        cmd = args[2][:current_command].name
        unless 'init'.eql?(cmd) || 'help'.eql?(cmd) || File.exist?(DIGGIT_RC)
          say_status(ERROR, "this is not a diggit directory", :red)
          exit
        end
        diggit
      end

      desc "init", "Initialize the current folder as a diggit folder."
      def init
        FileUtils.touch(DIGGIT_SOURCES)
        Oj.to_file(DIGGIT_LOG, {})
        Oj.to_file(DIGGIT_RC, { addons: [], analyses: [], joins: [], options: {} })
        say_status(DONE, "folder initialized")
      end

      desc 'status', "Display the status of the current diggit folder."
      def status
        color = (diggit.sources.get_all(nil, {error: true}).size > 0 && :red) || :blue
        say_status('[sources]', "#{diggit.sources.get_all([], {state: :new}).size} new (#{diggit.sources.get_all([], {state: :new, error: true}).size} errors), #{diggit.sources.get_all([], {state: :cloned}).size} cloned (#{diggit.sources.get_all([], {state: :cloned, error: true}).size} errors), #{diggit.sources.get_all([], {state: :finished}).size} finished", color)
        say_status('[addons]', "#{diggit.config.addons.join(', ')}", :blue)
        say_status('[analyses]', "#{diggit.config.analyses.join(', ')}", :blue)
        say_status('[joins]', "#{diggit.config.joins.join(', ')}", :blue)
        say_status('[options]', "#{diggit.config.options}", :blue)
      end

      desc "sources SUBCOMMAND ...ARGS", "manage sources for the current diggit folder."
      subcommand "sources", SourcesCli

      desc "joins SUBCOMMAND ...ARGS", "manage joins for the current diggit folder."
      subcommand "joins", JoinsCli

      desc "analyses SUBCOMMAND ...ARGS", "manage analyses for the current diggit folder."
      subcommand "analyses", AnalysesCli

      desc "addons SUBCOMMAND ...ARGS", "manage addons for the current diggit folder."
      subcommand "addons", AddonsCli

      desc "perform SUBCOMMAND ...ARGS", "perform actions in the current diggit folder."
      subcommand "perform", PerformCli

      desc "clean SUBCOMMAND ...ARGS", "clean the current diggit folder."
      subcommand "clean", CleanCli

    end

  end

end
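The 6-line bin/dgit executable listed in the file summary is not reproduced in this diff. Given that the specs drive the tool through Diggit::Cli::DiggitCli.start, a typical Thor binstub for this CLI would look roughly like the following sketch (an illustration only, not the shipped file):

```
#!/usr/bin/env ruby
# encoding: utf-8
# Illustrative binstub only; the actual bin/dgit content is not shown
# in this diff.
require_relative '../lib/diggit_cli'

Diggit::Cli::DiggitCli.start(ARGV)
```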
data/lib/diggit_core.rb
ADDED
@@ -0,0 +1,273 @@
#!/usr/bin/env ruby
# encoding: utf-8

require 'rugged'
require 'oj'
require 'singleton'

module Diggit

  DIGGIT_RC = '.dgitrrc'
  DIGGIT_LOG = '.dgitlog'
  DIGGIT_SOURCES = '.dgitsources'

  SOURCES_FOLDER = 'sources'
  INCLUDES_FOLDER = 'includes'

  class Addon

    def initialize(options)
      @options = options
    end

    def name
    end

  end

  class Analysis

    def initialize(source, repo, options, addons, globs)
      @source = source
      @repo = repo
      @options = options
      @addons = addons
      @globs = globs
    end

    def run
      raise NoMethodError.new "Subclass responsibility"
    end

    def clean
      raise NoMethodError.new "Subclass responsibility"
    end

  end

  class Join
    def initialize(sources, options, addons, globs)
      @sources = sources
      @options = options
      @addons = addons
      @globs = globs
    end

    def run
      raise NoMethodError.new "Subclass responsibility"
    end

    def clean
      raise NoMethodError.new "Subclass responsibility"
    end

  end

  class Config

    def initialize
      @config = Oj.load_file(DIGGIT_RC)
    end

    def save
      Oj.to_file(DIGGIT_RC, @config)
    end

    def analyses
      @config[:analyses]
    end

    def add_analysis(analysis)
      analyses << analysis unless analyses.include?(analysis)
      save
    end

    def rem_analysis(analysis)
      analyses.delete(analysis)
      save
    end

    def load_analyses(source, repo, addons, globs)
      analyses.map{ |a| Object::const_get(a).new(source, repo, options, addons, globs) }
    end

    def addons
      return @config[:addons]
    end

    def add_addon(addon)
      addons << addon unless addons.include?(addon)
      save
    end

    def rem_addon(addon)
      addons.delete(addon)
      save
    end

    def load_addons
      result = {}
      addons.each do |a|
        obj = Object::const_get(a).new(options)
        result[obj.name] = obj
      end
      return result
    end

    def joins
      return @config[:joins]
    end

    def add_join(join)
      joins << join unless joins.include?(join)
      save
    end

    def rem_join(join)
      joins.delete(join)
      save
    end

    def load_joins(finished_sources, addons, globs)
      return joins.map{ |j| Object::const_get(j).new(finished_sources, options, addons, globs) }
    end

    def options
      return @config[:options]
    end

  end

  class Sources

    def initialize
      @log = Log.new
      @sources = []
      IO.readlines(DIGGIT_SOURCES).each{ |line| @sources << line.strip }
    end

    def size
      return @sources.size
    end

    def save
      File.open(DIGGIT_SOURCES, "w") do |f|
        @sources.each{ |s| f.puts(s) }
      end
    end

    def add(url)
      unless @sources.include?(url)
        @sources << url
        @log.init(url)
        save
      end
    end

    def rem(source_def)
      url = url(source_def)
      @sources.delete(url)
      @log.rem(url)
      save
    end

    def get(source_def)
      hash(url(source_def))
    end

    def get_all(source_defs, filter={})
      sources = []
      if source_defs.nil? || source_defs.empty?
        sources = hashes
      else
        sources = source_defs.map{ |d| hash(d) }
      end
      sources = sources.select{ |s| s[:log][:state] == filter[:state] } if (filter.has_key?(:state))
      sources = sources.select{ |s| s[:log][:error].empty? != filter[:error] } if (filter.has_key?(:error))
      return sources
    end

    def update(source_hash)
      @log.update(source_hash)
    end

    def url(source_def)
      url = source_def
      if /\d+/.match(source_def)
        idx = source_def.to_i - 1
        raise "Wrong source identifier" if idx < 0 || idx >= @sources.size
        url = @sources[source_def.to_i - 1]
      end
      url
    end

    def hashes
      @sources.map{ |s| hash(s) }
    end

    def hash(url)
      {url: url, folder: folder(url), log: @log.log(url)}
    end

    def folder(url)
      File.expand_path(url.gsub(/[^[\w-]]+/, "_"), SOURCES_FOLDER)
    end

  end

  class Log

    def initialize
      @log = Oj.load_file(DIGGIT_LOG)
    end

    def save
      Oj.to_file(DIGGIT_LOG, @log)
    end

    def init(url)
      unless @log.has_key?(url)
        @log[url] = default_log
        save
      end
    end

    def update(hash)
      @log[hash[:url]] = hash[:log]
      save
    end

    def rem(url)
      @log.delete(url)
      save
    end

    def log(url)
      return @log[url]
    end

    def default_log
      return {state: :new, error: [], analyses: []}
    end

  end

  class Diggit
    attr_accessor :sources, :config, :root

    def initialize(*args)
      super
      @root = FileUtils.pwd
      @sources = Sources.new
      @config = Config.new
      load_plugins
    end

    def load_plugins
      globals = File.expand_path(INCLUDES_FOLDER, File.expand_path('..', File.dirname(File.realpath(__FILE__))))
      Dir.glob("#{globals}/**/*.rb").each{ |f| require f }
    end

  end

end
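Config persists the addon, analysis, join and options settings with Oj into the DIGGIT_RC file created by dgit init. Given how the bundled Db and Output addons read @options, a populated :options entry could look like the following sketch (the values are examples, not defaults shipped with the gem):

```
# Hypothetical :options structure consumed by the bundled addons:
# Db reads @options[:mongo][:database], Output reads @options[:output].
options = {
  mongo:  { database: 'my_diggit_db' },
  output: { out: 'results', tmp: 'scratch' }
}
```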
data/spec/diggit_spec.rb
ADDED
@@ -0,0 +1,129 @@
require 'spec_helper'
require 'fileutils'
require 'oj'

FileUtils.rm_rf('tmp')
FileUtils.mkdir('tmp')
FileUtils.cd('tmp')

TEST_URL = "https://github.com/jrfaller/test-git"
TEST_FOLDER = "https_github_com_jrfaller_test-git"
WRONG_URL = "foo"

RSpec.describe Diggit::Cli::DiggitCli do

  it "should init a diggit folder" do
    result = capture(:stdout) { Diggit::Cli::DiggitCli.start(["init"]) }
    expect(result).to include("folder initialized")
    expect(sources).to be_empty
    expect(log).to be_empty
    expect(config).to eq({analyses: [], addons: [], joins: [], options: {}})
  end

  it "should add an url" do
    Diggit::Cli::DiggitCli.start(["sources", "add", TEST_URL])
    expect(sources).to include(TEST_URL)
  end

  it "should add another url" do
    Diggit::Cli::DiggitCli.start(["sources", "add", WRONG_URL])
    expect(sources).to include(WRONG_URL)
  end

  it "should display the status" do
    result = capture(:stdout) { Diggit::Cli::DiggitCli.start(["status"]) }
    expect(result).to include("2 new (0 errors)")
  end

  it "should add an analysis" do
    Diggit::Cli::DiggitCli.start(["analyses", "add", "TestAnalysis"])
    expect(config[:analyses]).to eq(["TestAnalysis"])
  end

  it "should add an addon" do
    Diggit::Cli::DiggitCli.start(["addons", "add", "TestAddon"])
    expect(config[:addons]).to eq(["TestAddon"])
  end

  it "should add a join" do
    Diggit::Cli::DiggitCli.start(["joins", "add", "TestJoin"])
    expect(config[:joins]).to eq(["TestJoin"])
  end

  it "should perform clones on all urls, handling errors" do
    results = capture(:stdout) { Diggit::Cli::DiggitCli.start(["perform", "clones"]) }
    expect(results).to include("#{TEST_URL} cloned")
    expect(log[TEST_URL]).to include(state: :cloned, error: {})
    expect(File.exist?(File.expand_path(TEST_FOLDER, Diggit::SOURCES_FOLDER))).to be true
    expect(results).to include("error cloning foo")
    expect(log[WRONG_URL][:state]).to eq(:new)
    expect(log[WRONG_URL][:error]).to include(name: "Rugged::NetworkError")
  end

  it "should display info on a regular url" do
    results = capture(:stdout) { Diggit::Cli::DiggitCli.start(["sources", "info", TEST_URL]) }
    expect(results).to include("cloned")
    expect(results).to include(TEST_URL)
  end

  it "should display info and error on an url in error" do
    results = capture(:stdout) { Diggit::Cli::DiggitCli.start(["sources", "info", WRONG_URL]) }
    expect(results).to include("new")
    expect(results).to include(WRONG_URL)
    expect(results).to include("error")
    expect(results).to include("Rugged::NetworkError")
  end

  it "should display all errors" do
    results = capture(:stdout) { Diggit::Cli::DiggitCli.start(["sources", "errors"]) }
    expect(results).to include("new")
    expect(results).to include(WRONG_URL)
    expect(results).to include("error")
    expect(results).to include("Rugged::NetworkError")
  end

  it "should remove urls" do
    Diggit::Cli::DiggitCli.start(["sources", "rem", WRONG_URL])
    expect(sources).to_not include(WRONG_URL)
  end

  it "should perform analyses" do
    results = capture(:stdout) { Diggit::Cli::DiggitCli.start(["perform", "analyses"]) }
    expect(results).to include("TestAnalysis performed")
    expect(results).to include("source #{TEST_URL} analyzed")
    expect(log[TEST_URL][:state]).to eq(:finished)
  end

  it "should perform joins" do
    results = capture(:stdout) { Diggit::Cli::DiggitCli.start(["perform", "joins"]) }
    expect(results).to include("TestJoin performed")
    expect(results).to include("joins performed")
  end

  it "should clean joins" do
    results = capture(:stdout) { Diggit::Cli::DiggitCli.start(["clean", "joins"]) }
    expect(results).to include("TestJoin cleaned")
  end

  it "should clean analyses" do
    results = capture(:stdout) { Diggit::Cli::DiggitCli.start(["clean", "analyses"]) }
    expect(results).to include("TestAnalysis cleaned on #{TEST_URL}")
    expect(log[TEST_URL][:state]).to eq(:cloned)
  end

  it "should remove a join" do
    Diggit::Cli::DiggitCli.start(["joins", "rem", "TestJoin"])
    expect(config[:joins]).not_to include("TestJoin")
  end

  it "should remove an analysis" do
    Diggit::Cli::DiggitCli.start(["analyses", "rem", "TestAnalysis"])
    expect(config[:analyses]).not_to include("TestAnalysis")
  end

  it "should remove an addon" do
    Diggit::Cli::DiggitCli.start(["addons", "rem", "TestAddon"])
    expect(config[:addons]).not_to include("TestAddon")
  end

end
data/spec/spec_helper.rb
ADDED
@@ -0,0 +1,31 @@
require 'oj'

require_relative('../lib/diggit_cli')

$0 = "grit"
ARGV.clear

def config
  return Oj.load_file(Diggit::DIGGIT_RC)
end

def log
  return Oj.load_file(Diggit::DIGGIT_LOG)
end

def sources
  return IO.readlines(Diggit::DIGGIT_SOURCES).map{ |line| line.strip }
end

def capture(stream)
  begin
    stream = stream.to_s
    eval "$#{stream} = StringIO.new"
    yield
    result = eval("$#{stream}").string
  ensure
    eval("$#{stream} = #{stream.upcase}")
  end

  result
end
metadata
ADDED
@@ -0,0 +1,120 @@
--- !ruby/object:Gem::Specification
name: diggit
version: !ruby/object:Gem::Version
  version: 1.0.0
platform: ruby
authors:
- Jean-Rémy Falleri
- Matthieu Foucault
autorequire:
bindir: bin
cert_chain: []
date: 2014-10-27 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: rugged
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0.21'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0.21'
- !ruby/object:Gem::Dependency
  name: oj
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '2.10'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '2.10'
- !ruby/object:Gem::Dependency
  name: thor
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0.19'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '0.19'
- !ruby/object:Gem::Dependency
  name: rspec
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '3.1'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '3.1'
description: |
  The Diggit repository analysis tool is a neat swiss knife to enable the analysis of many Git repositories.
email: jr.falleri@gmail.com
executables:
- dgit
extensions: []
extra_rdoc_files: []
files:
- LICENSE
- README.md
- bin/dgit
- includes/addons/db.rb
- includes/addons/output.rb
- includes/addons/test.rb
- includes/analyses/authors.rb
- includes/analyses/cloc.rb
- includes/analyses/diff_stats.rb
- includes/analyses/metadata.rb
- includes/analyses/pom.rb
- includes/analyses/test.rb
- includes/joins/diff_size.rb
- includes/joins/test.rb
- lib/diggit_cli.rb
- lib/diggit_core.rb
- spec/diggit_spec.rb
- spec/spec_helper.rb
homepage: https://github.com/jrfaller/diggit
licenses:
- LGPL
metadata: {}
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
    - !ruby/object:Gem::Version
      version: '2.1'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.2.2
signing_key:
specification_version: 4
summary: A Git repository analysis tool.
test_files: []