#!/usr/bin/env ruby
require 'google/api_client'
require 'github_api'
require 'optparse'
require 'igraph'
require 'json'
require 'date'
options = {}
OptionParser.new do |opts|
  opts.banner = "Usage: ./sna.rb [options]"
  opts.on("-u", "--update_n [NUMBER_REPOS]", Integer, "Updates the top N repositories (default 100)") do |n|
    options[:update] = n || 100
  end
  opts.on("-n", "--number [NUMBER_REPOS]", Integer, "Query the top N repositories (default 100)") do |n|
    options[:query] = true
    options[:number] = n || 100
  end
  opts.on("-m", "--month", "Query monthly snapshots back to 4/2012") do
    options[:month] = true
  end
  opts.on("-q", "--query [OWNER/REPO]", String, "Query a single repository") do |r|
    options[:query] = true
    options[:owner], options[:repo] = r.split("/", 2)
  end
  opts.on_tail("-h", "--help", "Show this help message") do
    puts opts
    exit
  end
end.parse!
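#Example invocations using the flags defined above ("rails/rails" is only an illustrative repository):
#  ./sna.rb -u 100          # rebuild the top-repository BigQuery tables
#  ./sna.rb -n 50 -m        # build monthly graphs for the top 50 repositories
#  ./sna.rb -q rails/rails  # build a graph for a single repository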
class RepoGraph
  MAX_REPOS = 100000
  attr_accessor :graph
  @@github = Github.new(oauth_token: "d72762df620b07c1ca9dab8b62b3935087d71e1c")
  @@client = Google::APIClient.new(application_name: "SNA", application_version: "0.5")
  @@bq = @@client.discovered_api("bigquery", "v2")
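  #Authenticate to BigQuery as a service account: load the PKCS12 private key and
  #use the OAuth2 server-to-server (JWT) flow to fetch an access token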
  key = Google::APIClient::PKCS12.load_key("client.p12", "notasecret")
  @@client.authorization = Signet::OAuth2::Client.new(token_credential_uri: 'https://accounts.google.com/o/oauth2/token',
                                                      audience: 'https://accounts.google.com/o/oauth2/token',
                                                      scope: 'https://www.googleapis.com/auth/bigquery',
                                                      issuer: '[email protected]',
                                                      signing_key: key)
  @@client.authorization.fetch_access_token!
  def initialize(user, repo, by_month, single_lookup)
    @user = user
    @repo = repo
    @users = {}
    @uid = 0
    lookup_repo = single_lookup ? "[githubarchive:github.timeline]" : "[mygithubarchives.top_repo_info]"
    query = <<-EOF
      SELECT actor, created_at, payload_action, type, payload_commit, payload_number, url, repository_url, repository_name, repository_owner
      FROM #{lookup_repo}
      WHERE repository_owner='#{user}' AND repository_name='#{repo}' AND PARSE_UTC_USEC(created_at) >= PARSE_UTC_USEC('2012-04-01 00:00:00')
    EOF
    #Retrieves all necessary info for a repo and sorts it by event type
    puts "Querying for #{user} #{repo}"
    @repo_info, @repo_schema = RepoGraph.get_json_query(query)
    @dir = "output_files/graphs/#{@user}_#{@repo}"
    Dir.mkdir(@dir) unless File.exist?(@dir)
    data_start = Date.new(2012, 4)
    months = 1
    if by_month
      months += (Date.today.year - data_start.year) * 12 + (Date.today.month - data_start.month)
    end
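    #e.g. a --month run in October 2013 would give months = 1 + (2013-2012)*12 + (10-4) = 19 snapshots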
    months.times do |m|
      generate_repo_graph(m)
    end
  end
  #Retrieves the URLs of the top N repositories by activity since April 2012
  #Retrieves all necessary info on the top repositories
  #Processes approximately 15GB of data
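  #Shells out to the Google `bq` command-line tool (assumed to be installed and
  #authenticated); results are written into the mygithubarchives dataset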
  def self.update_top(n)
    query = <<-EOF
      SELECT repository_name, COUNT(repository_name) as activities, repository_owner, repository_url
      FROM [githubarchive:github.timeline]
      WHERE (type='CommitCommentEvent' OR type='IssueCommentEvent' OR type='IssuesEvent' OR type='PullRequestEvent' OR type='PullRequestReviewCommentEvent')
      AND PARSE_UTC_USEC(created_at) >= PARSE_UTC_USEC('2012-04-01 00:00:00')
      GROUP BY repository_name, repository_owner, repository_url
      ORDER BY activities DESC
      LIMIT #{n}
    EOF
    `bq rm -f mygithubarchives.top_repos`
    puts `bq query --destination_table=mygithubarchives.top_repos "#{query}"`
    query2 = <<-EOF
      SELECT actor, created_at, payload_action, type, payload_commit, payload_number, url, repository_url, repository_name, repository_owner
      FROM [githubarchive:github.timeline]
      WHERE repository_url IN (SELECT repository_url FROM mygithubarchives.top_repos) AND PARSE_UTC_USEC(created_at) >= PARSE_UTC_USEC('2012-04-01 00:00:00')
      AND (type='CommitCommentEvent' OR type='IssueCommentEvent' OR type='IssuesEvent' OR type='PullRequestEvent' OR type='PullRequestReviewCommentEvent');
    EOF
    `bq rm -f mygithubarchives.top_repo_info`
    puts `bq query --destination_table=mygithubarchives.top_repo_info "#{query2}"`
  end
  def self.get_top(n, m)
    query = <<-EOF
      SELECT repository_url, repository_name, repository_owner, activities
      FROM [mygithubarchives.top_repos]
      ORDER BY activities DESC
      LIMIT #{n}
    EOF
    top, schema = get_json_query(query)
    top.each do |row|
      RepoGraph.new(row.f[schema["repository_owner"]].v, row.f[schema["repository_name"]].v, m, false)
    end
  end
  #Schema:
  #[actor, created_at, payload_action, payload_commit, payload_number,
  # repository_name, repository_owner, repository_url, type, url]
  def self.get_json_query(query)
    #Makes the call to BigQuery and parses the JSON returned
    begin
      data = @@client.execute(api_method: @@bq.jobs.query, body_object: {query: query},
                              parameters: {projectId: "githubsna"}).data
      puts "Used #{data["total_bytes_processed"]} bytes of data"
      puts "Returned #{data["total_rows"]} rows"
      schema = {}
      data.schema.fields.each_with_index do |f, i|
        schema[f.name] = i
      end
    rescue
      puts "Retrying #{query}"
      retry
    end
    [data.rows, schema]
  end
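  #Rows are indexed through the returned schema hash, e.g. (sketch):
  #  rows, schema = get_json_query(query)
  #  rows.first.f[schema["actor"]].v  #=> actor login of the first row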
  def r(query_result, key)
    query_result.f[@repo_schema[key]].v
  end
  def generate_repo_graph(month)
    graph = IGraph.new([], false)
    #Retrieve snapshots at the first day of the month
    start_date = Date.new(Date.today.year, Date.today.month).prev_month(month)
    #Initialize nodes first
    graph.add_vertices(@users.values)
    monthly_repo_info = @repo_info.select do |e|
      begin
        Date.parse(r(e, "created_at")) < Date.today.prev_month(month)
      rescue
        p e
        exit
      end
    end.group_by{|e| r(e, "type")}
    monthly_repo_info.default = []
    commit_comments = monthly_repo_info["CommitCommentEvent"]
    #Get all the shas and commits first, and make them uniq, to prevent
    #retrieving the same one multiple times
    shas = commit_comments.collect{|c| r(c, "payload_commit")}.uniq
    commits = shas.collect do |sha|
      begin
        @@github.repos.commits.get(@user, @repo, sha)
      rescue
        nil
      end
    end.compact.uniq
    commit_users = {}
    #Index the commits by sha for quick retrieval
    commits.each do |c|
      begin
        commit_users[c["sha"]] = c["committer"]["login"]
      rescue
        commit_users[c["sha"]] = c["commit"]["committer"]["name"]
      end
    end
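    #commit_users now maps each commit sha to a login (or committer name when no
    #GitHub login is available), e.g. {"9f6ad0c" => "octocat"} (illustrative values)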
    #Iterate through all comments, making an edge between the comment creator
    #and the committer
    commit_comments.each do |cc|
      make_edge(graph, r(cc, "actor"), commit_users[r(cc, "payload_commit")])
    end
    #Handle issue events and pull request events together, because we treat
    #them the same way
    issues_pulls = monthly_repo_info["IssuesEvent"] + monthly_repo_info["PullRequestEvent"]
    #Group the issues and pulls by payload_action because we will be handling
    #closed ones and opened ones differently, and they need to reference
    #each other
    coip = issues_pulls.group_by{|ip| r(ip, "payload_action")}
    coip.default = []
    open_users = {}
    coip["opened"].each{|o| open_users[r(o, "payload_number")] = r(o, "actor")}
    coip["closed"].each{|c| make_edge(graph, r(c, "actor"), open_users[r(c, "payload_number")])}
    issue_comments = monthly_repo_info["IssueCommentEvent"]
    #Have to extract the payload number from the url for the next two event types
    #because it is not populated in their payloads
    issue_comments.each do |ic|
      make_edge(graph, r(ic, "actor"), open_users[r(ic, "url").match(/\/issues\/(\d+)#/)[1]])
    end
    pr_comments = monthly_repo_info["PullRequestReviewCommentEvent"]
    pr_comments.each do |pc|
      make_edge(graph, r(pc, "actor"), open_users[r(pc, "url").match(/\/pull\/(\d+)#/)[1]])
    end
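    #Write this month's snapshot as GraphML, e.g.
    #output_files/graphs/rails_rails/rails_rails_2012_04.graphml (owner/repo illustrative)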
    graph.write_graph_graphml(File.open("#{@dir}/#{@user}_#{@repo}_#{start_date.year}_#{start_date.month.to_s.rjust(2, '0')}.graphml", 'w'))
  end
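  #Adds or reinforces an undirected edge between two users; linking the same pair
  #again increments the edge's "weight" attribute. Self-loops and blank names are skipped.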
  def make_edge(graph, u1n, u2n)
    return if u1n == u2n || u1n.nil? || u1n.empty? || u2n.nil? || u2n.empty?
    @users[u1n] ||= {"label" => u1n}
    @users[u2n] ||= {"label" => u2n}
    u1 = @users[u1n]
    u2 = @users[u2n]
    graph.add_vertices([u1, u2])
    if graph.are_connected(u1, u2)
      graph.set_edge_attr(u1, u2, {"weight" => graph.get_edge_attr(u1, u2)["weight"] + 1})
    else
      graph.add_edges([u1, u2], [{"weight" => 1}])
    end
  end
end
if options[:update]
  if options[:update].is_a?(Integer)
    RepoGraph.update_top(options[:update])
  else
    raise ArgumentError
  end
end
if options[:query]
  if options[:number]
    RepoGraph.get_top(options[:number], options[:month])
  elsif options[:owner] && options[:repo]
    RepoGraph.new(options[:owner], options[:repo], options[:month], true)
  else
    raise ArgumentError
  end
end