sakuric 2.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (407) hide show
  1. data/CHANGELOG +13 -0
  2. data/Gemfile +22 -0
  3. data/Gemfile.lock +60 -0
  4. data/HISTORY.yml +13 -0
  5. data/INSTALL +5 -0
  6. data/LICENSE +1 -0
  7. data/Makefile +19 -0
  8. data/Manifest +405 -0
  9. data/README.md +72 -0
  10. data/Rakefile +4 -0
  11. data/TODO +3 -0
  12. data/VERSION +1 -0
  13. data/bashrc +30 -0
  14. data/bashrc.d/00-functions.include +7 -0
  15. data/bashrc.d/01-sakura_checks.include +15 -0
  16. data/bashrc.d/02-uname.include +11 -0
  17. data/bashrc.d/80-ruby.include +4 -0
  18. data/bashrc.d/99-dirs.include +11 -0
  19. data/bashrc.d/aliases.include +20 -0
  20. data/bashrc.d/common.include +7 -0
  21. data/bashrc.d/local.d/README +1 -0
  22. data/bashrc.d/local.d/nothing.include +0 -0
  23. data/bashrc.d/uname.d/Darwin +6 -0
  24. data/bashrc.d/uname.d/Linux +0 -0
  25. data/bashrc.local.sample +11 -0
  26. data/bin/10 +7 -0
  27. data/bin/10times +7 -0
  28. data/bin/1every +22 -0
  29. data/bin/1everyN +22 -0
  30. data/bin/1suN +22 -0
  31. data/bin/2 +5 -0
  32. data/bin/act +24 -0
  33. data/bin/amarelo +2 -0
  34. data/bin/arancio +4 -0
  35. data/bin/arcobaleno +10 -0
  36. data/bin/bianco +3 -0
  37. data/bin/black +2 -0
  38. data/bin/brew-install +3 -0
  39. data/bin/cache +50 -0
  40. data/bin/cache3 +50 -0
  41. data/bin/cheorae +4 -0
  42. data/bin/colorastdouterr +37 -0
  43. data/bin/cp2desktop +3 -0
  44. data/bin/create-git-repo.sh +24 -0
  45. data/bin/csoe +37 -0
  46. data/bin/data +3 -0
  47. data/bin/derive +90 -0
  48. data/bin/dimmi +31 -0
  49. data/bin/dimmiora +4 -0
  50. data/bin/doppioni +48 -0
  51. data/bin/du- +9 -0
  52. data/bin/duplicates +48 -0
  53. data/bin/echodo +7 -0
  54. data/bin/ensure_line_present +82 -0
  55. data/bin/epoch2date +5 -0
  56. data/bin/fanout +100 -0
  57. data/bin/find-broken-symlink +26 -0
  58. data/bin/find-duplicates +48 -0
  59. data/bin/find-special-chars-within-filenames +12 -0
  60. data/bin/fix-terminal +5 -0
  61. data/bin/gce-create-centos-instance +5 -0
  62. data/bin/gce-create-custom-image +8 -0
  63. data/bin/gce-curl-get-instances-for-project +7 -0
  64. data/bin/gce-get-token +3 -0
  65. data/bin/gce-getproject-metadata +35 -0
  66. data/bin/gce-public-ip +4 -0
  67. data/bin/gcutil-fetch +36 -0
  68. data/bin/giallo +2 -0
  69. data/bin/git-clone-in-non-empty-dir +21 -0
  70. data/bin/git-ignore +3 -0
  71. data/bin/git-repos +98 -0
  72. data/bin/git-repos.py +99 -0
  73. data/bin/gitinfo +2 -0
  74. data/bin/google-spreadsheet-cat +40 -0
  75. data/bin/google_ips +5 -0
  76. data/bin/gray +3 -0
  77. data/bin/green +2 -0
  78. data/bin/grigio +3 -0
  79. data/bin/gsutil-debug-bucket +12 -0
  80. data/bin/gsutil-get-oauth2-token +4 -0
  81. data/bin/gugol-image +74 -0
  82. data/bin/hamachi-ubuntu-install +17 -0
  83. data/bin/head-1^-1 +5 -0
  84. data/bin/ifdownup +13 -0
  85. data/bin/immature/ok2 +25 -0
  86. data/bin/immature/twice-still-broken +9 -0
  87. data/bin/install-gic-repo +13 -0
  88. data/bin/install-sakura-on-debian-latest +6 -0
  89. data/bin/iphoto_big_files.sh +37 -0
  90. data/bin/itunes +140 -0
  91. data/bin/jason-xpath.rb +54 -0
  92. data/bin/jsawk +1128 -0
  93. data/bin/json-xpath.rb +54 -0
  94. data/bin/keepup +13 -0
  95. data/bin/lsof-top10 +3 -0
  96. data/bin/minicook +218 -0
  97. data/bin/mv2. +2 -0
  98. data/bin/mv2desktop +2 -0
  99. data/bin/mv2dropbox +2 -0
  100. data/bin/mv2here +2 -0
  101. data/bin/mvto +39 -0
  102. data/bin/nero +2 -0
  103. data/bin/never_as_root +18 -0
  104. data/bin/not +16 -0
  105. data/bin/ok +42 -0
  106. data/bin/orange +4 -0
  107. data/bin/pink +2 -0
  108. data/bin/polygen-dell +7 -0
  109. data/bin/polygen-heanet +6 -0
  110. data/bin/polygen-labs +4 -0
  111. data/bin/print_key_val +4 -0
  112. data/bin/rainbow +10 -0
  113. data/bin/recipe +19 -0
  114. data/bin/red +2 -0
  115. data/bin/remember-command +140 -0
  116. data/bin/remember-command.rb +140 -0
  117. data/bin/richelp +99 -0
  118. data/bin/ricorda-comando +140 -0
  119. data/bin/rosa +2 -0
  120. data/bin/rosso +2 -0
  121. data/bin/run-parts.sh +50 -0
  122. data/bin/sakura-check-version +48 -0
  123. data/bin/sakura-init.DRAFT +5 -0
  124. data/bin/sakura-version +2 -0
  125. data/bin/sano +5 -0
  126. data/bin/sbianca +3 -0
  127. data/bin/sblua +2 -0
  128. data/bin/setterm-reset +5 -0
  129. data/bin/sgialla +2 -0
  130. data/bin/sgrigia +2 -0
  131. data/bin/split +37 -0
  132. data/bin/split.rb +37 -0
  133. data/bin/srosa +1 -0
  134. data/bin/srossa +2 -0
  135. data/bin/strip-white-spaces +2 -0
  136. data/bin/stty-sane +5 -0
  137. data/bin/sudo^-1 +18 -0
  138. data/bin/sverda +6 -0
  139. data/bin/swift-saio-install.sh +336 -0
  140. data/bin/synergyc +0 -0
  141. data/bin/synergys +0 -0
  142. data/bin/syracuse.pl +29 -0
  143. data/bin/tail-n+2 +5 -0
  144. data/bin/tellme +31 -0
  145. data/bin/tellme-time +4 -0
  146. data/bin/timeout3 +91 -0
  147. data/bin/top5 +4 -0
  148. data/bin/trim +4 -0
  149. data/bin/trova-broken-symlink +26 -0
  150. data/bin/twice +5 -0
  151. data/bin/twice.rb +5 -0
  152. data/bin/ubuntu-install-hamachi64 +17 -0
  153. data/bin/ultimo +5 -0
  154. data/bin/upload.py +2380 -0
  155. data/bin/usleep +0 -0
  156. data/bin/varia +90 -0
  157. data/bin/verde +2 -0
  158. data/bin/vermelho +2 -0
  159. data/bin/vim-whereveritis +5 -0
  160. data/bin/viola +1 -0
  161. data/bin/virsh-list-all-vms.py +21 -0
  162. data/bin/virtualbox-manage +41 -0
  163. data/bin/weekend +12 -0
  164. data/bin/whattimeisit +4 -0
  165. data/bin/whereis-ip +5 -0
  166. data/bin/white +3 -0
  167. data/bin/whitefy +3 -0
  168. data/bin/yellow +2 -0
  169. data/bin/yellowfy +2 -0
  170. data/bin/zombies +25 -0
  171. data/docz/polygen/dell.grm +100 -0
  172. data/docz/polygen/frati.grm +85 -0
  173. data/docz/polygen/heanet.grm +54 -0
  174. data/docz/richelp/gcompute.yml +52 -0
  175. data/docz/richelp/gcutil.yml +52 -0
  176. data/docz/richelp/gmail.yml +2 -0
  177. data/docz/richelp/ruby.yml +81 -0
  178. data/docz/richelp/sakura.yml +20 -0
  179. data/docz/richelp/sql.yml +12 -0
  180. data/docz/richelp/ubuntu.yml +22 -0
  181. data/docz/richelp/yaml.yml +39 -0
  182. data/docz/richelp/yml.yml +39 -0
  183. data/etc/sakura/devel.yml +36 -0
  184. data/etc/synergy.conf.example +37 -0
  185. data/images/color-sample.png +0 -0
  186. data/images/google.jpg +0 -0
  187. data/images/icons/toilet.png +0 -0
  188. data/images/sakura.jpg +0 -0
  189. data/images/sakura2.jpg +0 -0
  190. data/init.rb +5 -0
  191. data/lib/classes/arrays.rb +246 -0
  192. data/lib/classes/debug_ric.rb +196 -0
  193. data/lib/classes/fake_data.rb +8 -0
  194. data/lib/classes/ric_debug.rb +0 -0
  195. data/lib/classes/ricconf.rb +110 -0
  196. data/lib/classes/richelp.rb +70 -0
  197. data/lib/classes/strings.rb +284 -0
  198. data/lib/facter/google_compute_engine.rb +10 -0
  199. data/lib/recipes/20120411-puppet-rump-palladius.recipe +2 -0
  200. data/lib/recipes/20120726-skype.ubuntu.recipe +16 -0
  201. data/lib/recipes/20130206-puppet3.ubuntu.recipe +32 -0
  202. data/lib/recipes/20130206-redis.linux.recipe +17 -0
  203. data/lib/recipes/20130208-vanilla.linux.recipe +37 -0
  204. data/lib/recipes/Makefile +4 -0
  205. data/lib/recipes/README.md +29 -0
  206. data/lib/recipes/scripts/redis.sh +14 -0
  207. data/lib/ric.rb +156 -0
  208. data/lib/ric_colors.rb +313 -0
  209. data/lib/ric_strings.rb +285 -0
  210. data/lib/sonice-players/itunes_mac.rb +43 -0
  211. data/lib/sonice-players/itunes_win.rb +47 -0
  212. data/lib/sonice-players/mpd.rb +45 -0
  213. data/lib/sonice-players/rhythmbox.rb +38 -0
  214. data/profile +1 -0
  215. data/sakuric.gemspec +40 -0
  216. data/sbin/install-sakura-on-debian +25 -0
  217. data/sbin/make-install.sh +10 -0
  218. data/sbin/mate-createsymlink +4 -0
  219. data/sounds/0.ogg +0 -0
  220. data/sounds/0.wav +0 -0
  221. data/sounds/01.ogg +0 -0
  222. data/sounds/02.ogg +0 -0
  223. data/sounds/03.ogg +0 -0
  224. data/sounds/08.ogg +0 -0
  225. data/sounds/09.ogg +0 -0
  226. data/sounds/1.ogg +0 -0
  227. data/sounds/1.wav +0 -0
  228. data/sounds/10.ogg +0 -0
  229. data/sounds/100.ogg +0 -0
  230. data/sounds/11.ogg +0 -0
  231. data/sounds/12.ogg +0 -0
  232. data/sounds/13.ogg +0 -0
  233. data/sounds/14.ogg +0 -0
  234. data/sounds/15.ogg +0 -0
  235. data/sounds/16.ogg +0 -0
  236. data/sounds/17.ogg +0 -0
  237. data/sounds/18.ogg +0 -0
  238. data/sounds/19.ogg +0 -0
  239. data/sounds/2.ogg +0 -0
  240. data/sounds/2.wav +0 -0
  241. data/sounds/20.ogg +0 -0
  242. data/sounds/21.ogg +0 -0
  243. data/sounds/22.ogg +0 -0
  244. data/sounds/23.ogg +0 -0
  245. data/sounds/24.ogg +0 -0
  246. data/sounds/26.ogg +0 -0
  247. data/sounds/3.ogg +0 -0
  248. data/sounds/3.wav +0 -0
  249. data/sounds/30.ogg +0 -0
  250. data/sounds/34.ogg +0 -0
  251. data/sounds/4.ogg +0 -0
  252. data/sounds/4.wav +0 -0
  253. data/sounds/40.ogg +0 -0
  254. data/sounds/42.ogg +0 -0
  255. data/sounds/45.ogg +0 -0
  256. data/sounds/47.ogg +0 -0
  257. data/sounds/5.ogg +0 -0
  258. data/sounds/5.wav +0 -0
  259. data/sounds/50.ogg +0 -0
  260. data/sounds/6.ogg +0 -0
  261. data/sounds/6.wav +0 -0
  262. data/sounds/60.ogg +0 -0
  263. data/sounds/7.ogg +0 -0
  264. data/sounds/7.wav +0 -0
  265. data/sounds/70.ogg +0 -0
  266. data/sounds/8.ogg +0 -0
  267. data/sounds/8.wav +0 -0
  268. data/sounds/80.ogg +0 -0
  269. data/sounds/9.ogg +0 -0
  270. data/sounds/9.wav +0 -0
  271. data/sounds/90.ogg +0 -0
  272. data/sounds/a.ogg +0 -0
  273. data/sounds/aaahhh.ogg +0 -0
  274. data/sounds/abologna.ogg +0 -0
  275. data/sounds/adargenta.ogg +0 -0
  276. data/sounds/alzatiecammina.ogg +0 -0
  277. data/sounds/apache.ogg +0 -0
  278. data/sounds/attenzione.ogg +0 -0
  279. data/sounds/awesome.m4a +0 -0
  280. data/sounds/bazinga.mp3 +0 -0
  281. data/sounds/bazinga.ogg +0 -0
  282. data/sounds/bela regaz.wav +0 -0
  283. data/sounds/benvenuti.wav +0 -0
  284. data/sounds/benvenuti2.wav +0 -0
  285. data/sounds/bergonz.ogg +0 -0
  286. data/sounds/boh.ogg +0 -0
  287. data/sounds/buahah.ogg +0 -0
  288. data/sounds/buonasera.ogg +0 -0
  289. data/sounds/buongiorno.ogg +0 -0
  290. data/sounds/burp.ogg +0 -0
  291. data/sounds/carlo.ogg +0 -0
  292. data/sounds/cena.ogg +0 -0
  293. data/sounds/che figata.m4a +0 -0
  294. data/sounds/che figlio di puttana.m4a +0 -0
  295. data/sounds/che.ogg +0 -0
  296. data/sounds/ciao.ogg +0 -0
  297. data/sounds/ciao.wav +0 -0
  298. data/sounds/da.ogg +0 -0
  299. data/sounds/davide.ogg +0 -0
  300. data/sounds/demone.ogg +0 -0
  301. data/sounds/dhcp.ogg +0 -0
  302. data/sounds/dinuovo.ogg +0 -0
  303. data/sounds/dns.ogg +0 -0
  304. data/sounds/dopo.ogg +0 -0
  305. data/sounds/e'.ogg +0 -0
  306. data/sounds/eattivo.ogg +0 -0
  307. data/sounds/ee.ogg +0 -0
  308. data/sounds/ehi.ogg +0 -0
  309. data/sounds/eoragiu.ogg +0 -0
  310. data/sounds/eorasu.ogg +0 -0
  311. data/sounds/eripartita.ogg +0 -0
  312. data/sounds/estato.ogg +0 -0
  313. data/sounds/estatoriavviato.ogg +0 -0
  314. data/sounds/fratro.ogg +0 -0
  315. data/sounds/frozenbubble.ogg +0 -0
  316. data/sounds/funziona.ogg +0 -0
  317. data/sounds/go raibh mile maith agaibh.m4a +0 -0
  318. data/sounds/grazie.ogg +0 -0
  319. data/sounds/h1.ogg +0 -0
  320. data/sounds/hainuovaposta.ogg +0 -0
  321. data/sounds/hogiamangiato.ogg +0 -0
  322. data/sounds/host.ogg +0 -0
  323. data/sounds/il.ogg +0 -0
  324. data/sounds/ilcomputer.ogg +0 -0
  325. data/sounds/ilnumerodaleiselezionatoe.ogg +0 -0
  326. data/sounds/ilprocesso.ogg +0 -0
  327. data/sounds/ilprogramma.ogg +0 -0
  328. data/sounds/ilservizio.ogg +0 -0
  329. data/sounds/imap.ogg +0 -0
  330. data/sounds/imieisuperpoterimidiconoche.ogg +0 -0
  331. data/sounds/infunzione.ogg +0 -0
  332. data/sounds/inpunto.ogg +0 -0
  333. data/sounds/ipmon.ogg +0 -0
  334. data/sounds/laconnessioneadinternet.ogg +0 -0
  335. data/sounds/lastampante.ogg +0 -0
  336. data/sounds/lazzaron.ogg +0 -0
  337. data/sounds/ldap.ogg +0 -0
  338. data/sounds/linterfaccia.ogg +0 -0
  339. data/sounds/lucilla.ogg +0 -0
  340. data/sounds/ma vaffanculo.m4a +0 -0
  341. data/sounds/majjal.wav +0 -0
  342. data/sounds/mamma.ogg +0 -0
  343. data/sounds/mauro.ogg +0 -0
  344. data/sounds/max.ogg +0 -0
  345. data/sounds/meerda.wav +0 -0
  346. data/sounds/meno.ogg +0 -0
  347. data/sounds/merda clo.wav +0 -0
  348. data/sounds/mezza.ogg +0 -0
  349. data/sounds/mila.ogg +0 -0
  350. data/sounds/mille.ogg +0 -0
  351. data/sounds/minuti.ogg +0 -0
  352. data/sounds/named.ogg +0 -0
  353. data/sounds/no.ogg +0 -0
  354. data/sounds/nohofame.ogg +0 -0
  355. data/sounds/nonfunziona.ogg +0 -0
  356. data/sounds/nonriescoatrovare.ogg +0 -0
  357. data/sounds/nonsarebbeoradilavorare.ogg +0 -0
  358. data/sounds/nonstalavorando.ogg +0 -0
  359. data/sounds/numero.wav +0 -0
  360. data/sounds/ooohhh.ogg +0 -0
  361. data/sounds/pinger.ogg +0 -0
  362. data/sounds/pocofa.ogg +0 -0
  363. data/sounds/pop3.ogg +0 -0
  364. data/sounds/porca puttana.m4a +0 -0
  365. data/sounds/porcapupazza.ogg +0 -0
  366. data/sounds/ppp.ogg +0 -0
  367. data/sounds/pranzo.ogg +0 -0
  368. data/sounds/prego.ogg +0 -0
  369. data/sounds/prima.ogg +0 -0
  370. data/sounds/qualcosamidice.ogg +0 -0
  371. data/sounds/raggiungibile.ogg +0 -0
  372. data/sounds/riccardo.ogg +0 -0
  373. data/sounds/salsa ridge.wav +0 -0
  374. data/sounds/scusa.ogg +0 -0
  375. data/sounds/seriously man.m4a +0 -0
  376. data/sounds/si.ogg +0 -0
  377. data/sounds/sito porno con sonoro.wav +0 -0
  378. data/sounds/socmel.wav +0 -0
  379. data/sounds/sono.ogg +0 -0
  380. data/sounds/sonoacasa.ogg +0 -0
  381. data/sounds/sonole.ogg +0 -0
  382. data/sounds/sputo.ogg +0 -0
  383. data/sounds/ssh.ogg +0 -0
  384. data/sounds/stagiocando.ogg +0 -0
  385. data/sounds/statospento.ogg +0 -0
  386. data/sounds/stopaiodipalle.ogg +0 -0
  387. data/sounds/thesedicks.wav +0 -0
  388. data/sounds/tornatosu.ogg +0 -0
  389. data/sounds/trapoco.ogg +0 -0
  390. data/sounds/trequarti.ogg +0 -0
  391. data/sounds/tutti.ogg +0 -0
  392. data/sounds/una.ogg +0 -0
  393. data/sounds/unquarto.ogg +0 -0
  394. data/sounds/virgola.ogg +0 -0
  395. data/sources/c/usleep.c +50 -0
  396. data/spec/my_first_spec.rb +6 -0
  397. data/tasks/sakuric-gem.rake +21 -0
  398. data/tasks/testing.rake +15 -0
  399. data/templates/bashrc.inject +10 -0
  400. data/templates/happy_christmas.ascii_art +5 -0
  401. data/test/roodi.sh +10 -0
  402. data/test/test_mini_failing.rb +3 -0
  403. data/test/test_mini_ok.rb +3 -0
  404. data/test/test_ruby_syntax.sh +12 -0
  405. data/third-party/jsawk/README.markdown +437 -0
  406. data/third-party/jsawk/jsawk +1128 -0
  407. metadata +835 -0
Binary file
Binary file
@@ -0,0 +1,29 @@
1
+ #!/usr/bin/perl
2
+
3
+ my $SEPARATOR=" ";
4
+ my $niterations = 0;
5
+
6
+ sub siracusa {
7
+ my $t = shift;
8
+ $niterations ++ ;
9
+ if ($t == 1 ) {
10
+ return 1;
11
+ }
12
+ print "$t$SEPARATOR";
13
+ if ($t %2) {
14
+ return siracusa(3 * $t +1);
15
+ }
16
+ return siracusa($t/2);
17
+ }
18
+
19
+ print "# Simple yet neat algorithm: if n is even, I halve it. If it's odd I triple it and add 1. Look:\n";
20
+ my $arg = 0+int($ARGV[0]);
21
+ if ($arg == 0) {
22
+ $arg = 42;
23
+ }
24
+ print siracusa($arg);
25
+ #print siracusa(0 + int($ARGV[0]));
26
+ print "\n";
27
+ print "# Flight time: $niterations\n";
28
+ print "# Flight pendency: " .($niterations/$arg) ."\n";
29
+
@@ -0,0 +1,5 @@
1
+
2
+ # removes the first line from STDIN!
3
+
4
+ tail -n +2
5
+
@@ -0,0 +1,31 @@
1
+ #!/bin/bash
2
+
3
+ DIMMIPATH="$SAKURADIR/sounds/"
4
+ SUONA=/usr/bin/ogg123
5
+
6
+ if [ ! -f $SUONA ] ; then
7
+ echo "Ogg123 not installed, try: sudo apt-get install vorbis-tools"
8
+ exit 2
9
+ fi
10
+
11
+ function gestisci() {
12
+ PAROLA="$*"
13
+ if test -z "$1" ; then
14
+ return
15
+ fi
16
+ FNAME="$DIMMIPATH/$PAROLA.ogg"
17
+ RISERVA="$DIMMIPATH/boh.ogg"
18
+ if [ -f "$FNAME" ] ; then
19
+ $SUONA "$FNAME" >/dev/null 2>/dev/null
20
+ else
21
+ $SUONA "$RISERVA" >/dev/null 2>/dev/null
22
+ echo "'$FNAME' not found" >&2
23
+ fi
24
+ }
25
+
26
+ gestisci "$1"
27
+ while shift; do
28
+ if test ! -z "$1" ; then
29
+ gestisci $1
30
+ fi
31
+ done
@@ -0,0 +1,4 @@
1
+ #!/bin/sh
2
+
3
+ echo It tells you what time is it in Italian via OGG libraries..
4
+ $SAKURADIR/bin/dimmi $( date +"sonole %H ee %M minuti " )
@@ -0,0 +1,91 @@
1
+ #!/bin/bash
2
+ #
3
+ # The Bash shell script executes a command with a time-out.
4
+ # Upon time-out expiration SIGTERM (15) is sent to the process. If the signal
5
+ # is blocked, then the subsequent SIGKILL (9) terminates it.
6
+ #
7
+ # Based on the Bash documentation example.
8
+
9
+ # Hello Chet,
10
+ # please find attached a "little easier" :-) to comprehend
11
+ # time-out example. If you find it suitable, feel free to include
12
+ # anywhere: the very same logic as in the original examples/scripts, a
13
+ # little more transparent implementation to my taste.
14
+ #
15
+ # Dmitry V Golovashkin <Dmitry.Golovashkin@sas.com>
16
+
17
+ scriptName="${0##*/}"
18
+
19
+ declare -i DEFAULT_TIMEOUT=9
20
+ declare -i DEFAULT_INTERVAL=1
21
+ declare -i DEFAULT_DELAY=1
22
+
23
+ # Timeout.
24
+ declare -i timeout=DEFAULT_TIMEOUT
25
+ # Interval between checks if the process is still alive.
26
+ declare -i interval=DEFAULT_INTERVAL
27
+ # Delay between posting the SIGTERM signal and destroying the process by SIGKILL.
28
+ declare -i delay=DEFAULT_DELAY
29
+
30
+ function printUsage() {
31
+ cat <<EOF
32
+
33
+ Synopsis
34
+ $scriptName [-t timeout] [-i interval] [-d delay] command
35
+ Execute a command with a time-out.
36
+ Upon time-out expiration SIGTERM (15) is sent to the process. If SIGTERM
37
+ signal is blocked, then the subsequent SIGKILL (9) terminates it.
38
+
39
+ -t timeout
40
+ Number of seconds to wait for command completion.
41
+ Default value: $DEFAULT_TIMEOUT seconds.
42
+
43
+ -i interval
44
+ Interval between checks if the process is still alive.
45
+ Positive integer, default value: $DEFAULT_INTERVAL seconds.
46
+
47
+ -d delay
48
+ Delay between posting the SIGTERM signal and destroying the
49
+ process by SIGKILL. Default value: $DEFAULT_DELAY seconds.
50
+
51
+ As of today, Bash does not support floating point arithmetic (sleep does),
52
+ therefore all delay/time values must be integers.
53
+ EOF
54
+ }
55
+
56
+ # Options.
57
+ while getopts ":t:i:d:" option; do
58
+ case "$option" in
59
+ t) timeout=$OPTARG ;;
60
+ i) interval=$OPTARG ;;
61
+ d) delay=$OPTARG ;;
62
+ *) printUsage; exit 1 ;;
63
+ esac
64
+ done
65
+ shift $((OPTIND - 1))
66
+
67
+ # $# should be at least 1 (the command to execute), however it may be strictly
68
+ # greater than 1 if the command itself has options.
69
+ if (($# == 0 || interval <= 0)); then
70
+ printUsage
71
+ exit 1
72
+ fi
73
+
74
+ # kill -0 pid Exit code indicates if a signal may be sent to $pid process.
75
+ (
76
+ ((t = timeout))
77
+
78
+ while ((t > 0)); do
79
+ sleep $interval
80
+ kill -0 $$ || exit 0
81
+ ((t -= interval))
82
+ done
83
+
84
+ # Be nice, post SIGTERM first.
85
+ # The 'exit 0' below will be executed if any preceeding command fails.
86
+ kill -s SIGTERM $$ && kill -0 $$ || exit 0
87
+ sleep $delay
88
+ kill -s SIGKILL $$
89
+ ) 2> /dev/null &
90
+
91
+ exec "$@"
@@ -0,0 +1,4 @@
1
+ #!/bin/sh
2
+
3
+ NUMBER=${1:-5}
4
+ ps aux | grep -v USER | awk '{print $3 "\t" $2 "\t" $11}' | sort -nr | head -n $NUMBER
@@ -0,0 +1,4 @@
1
+ #!/bin/sh
2
+
3
+ # trims left and right parts of every line :)
4
+ sed -e 's/^[ \t]*//g' | sed -e 's/[ \t]*$//g'
@@ -0,0 +1,26 @@
1
+ #!/bin/bash
2
+
3
+ # This finds everything which is in the current dir (and below) and looks for broken links.
4
+
5
+ WHERE=${1:-.}
6
+ CANCELLA=${2:-noncancellare}
7
+
8
+ usage() {
9
+ echo "Usage: $(basename $0) [<DIRECTORY>] [--delete]"
10
+ echo ""
11
+ echo " This scripts finds the symlinks in the specified DIR tree."
12
+ echo " If directory is not specified, current dir ('.') is assumed."
13
+ echo " With --delete option, it'll also delete them"
14
+ }
15
+
16
+ if echo $2 | egrep -q '^--delete$' ; then
17
+ blue "# I DELETE symlinks from '$WHERE' in giu:" >&2
18
+ find $WHERE -type l ! -exec test -r {} \; -print0 | xargs -0 rm
19
+ green "Done"
20
+ else
21
+ yellow "# I look for symlinks from '$WHERE' downwards:" >&2
22
+ find $WHERE -type l ! -exec test -r {} \; -print
23
+ usage
24
+ # echo "+ If you want to delete em, please type: '$0 $WHERE --delete'"
25
+ fi
26
+
@@ -0,0 +1,5 @@
1
+ #!/usr/bin/ruby
2
+
3
+ (1..2).each {|i|
4
+ system ARGV.join(' ')
5
+ }
@@ -0,0 +1,5 @@
1
+ #!/usr/bin/ruby
2
+
3
+ (1..2).each {|i|
4
+ system ARGV.join(' ')
5
+ }
@@ -0,0 +1,17 @@
1
+
2
+ # for 64bit
3
+ HAMACHI64=logmein-hamachi_2.1.0.17-1_amd64.deb
4
+ HAMACHI32=logmein-hamachi_2.1.0.17-1_i386.deb
5
+
6
+ HAMACHI=$HAMACHI32
7
+
8
+ echo Installing hamachi arch: $HAMACHI ...
9
+
10
+ sudo add-apt-repository ppa:webupd8team/haguichi
11
+ wget https://secure.logmein.com/labs/$HAMACHI &&
12
+ dpkg -i $HAMACHI &&
13
+ echo This should try to install hamachi and fail but dont worry.
14
+ sudo apt-get -f install &&
15
+ sudo apt-get update &&
16
+ sudo apt-get install -y hamachi haguichi
17
+
@@ -0,0 +1,5 @@
1
+
2
+
3
+ ls -lart $* |egrep -v ^d | ripulisci | tail -1 | while read A B C D E F G H NOME ; do
4
+ echo $NOME
5
+ done
@@ -0,0 +1,2380 @@
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+ #
4
+ # Copyright 2007 Google Inc.
5
+ #
6
+ # Licensed under the Apache License, Version 2.0 (the "License");
7
+ # you may not use this file except in compliance with the License.
8
+ # You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing, software
13
+ # distributed under the License is distributed on an "AS IS" BASIS,
14
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
+ # See the License for the specific language governing permissions and
16
+ # limitations under the License.
17
+
18
+ """Tool for uploading diffs from a version control system to the codereview app.
19
+
20
+ Usage summary: upload.py [options] [-- diff_options] [path...]
21
+
22
+ Diff options are passed to the diff command of the underlying system.
23
+
24
+ Supported version control systems:
25
+ Git
26
+ Mercurial
27
+ Subversion
28
+ Perforce
29
+ CVS
30
+
31
+ It is important for Git/Mercurial users to specify a tree/node/branch to diff
32
+ against by using the '--rev' option.
33
+ """
34
+ # This code is derived from appcfg.py in the App Engine SDK (open source),
35
+ # and from ASPN recipe #146306.
36
+
37
+ import ConfigParser
38
+ import cookielib
39
+ import errno
40
+ import fnmatch
41
+ import getpass
42
+ import logging
43
+ import marshal
44
+ import mimetypes
45
+ import optparse
46
+ import os
47
+ import re
48
+ import socket
49
+ import subprocess
50
+ import sys
51
+ import urllib
52
+ import urllib2
53
+ import urlparse
54
+
55
+ # The md5 module was deprecated in Python 2.5.
56
+ try:
57
+ from hashlib import md5
58
+ except ImportError:
59
+ from md5 import md5
60
+
61
+ try:
62
+ import readline
63
+ except ImportError:
64
+ pass
65
+
66
+ try:
67
+ import keyring
68
+ except ImportError:
69
+ keyring = None
70
+
71
+ # The logging verbosity:
72
+ # 0: Errors only.
73
+ # 1: Status messages.
74
+ # 2: Info logs.
75
+ # 3: Debug logs.
76
+ verbosity = 1
77
+
78
+ # The account type used for authentication.
79
+ # This line could be changed by the review server (see handler for
80
+ # upload.py).
81
+ AUTH_ACCOUNT_TYPE = "GOOGLE"
82
+
83
+ # URL of the default review server. As for AUTH_ACCOUNT_TYPE, this line could be
84
+ # changed by the review server (see handler for upload.py).
85
+ DEFAULT_REVIEW_SERVER = "codereview.appspot.com"
86
+
87
+ # Max size of patch or base file.
88
+ MAX_UPLOAD_SIZE = 900 * 1024
89
+
90
+ # Constants for version control names. Used by GuessVCSName.
91
+ VCS_GIT = "Git"
92
+ VCS_MERCURIAL = "Mercurial"
93
+ VCS_SUBVERSION = "Subversion"
94
+ VCS_PERFORCE = "Perforce"
95
+ VCS_CVS = "CVS"
96
+ VCS_UNKNOWN = "Unknown"
97
+
98
+ VCS_ABBREVIATIONS = {
99
+ VCS_MERCURIAL.lower(): VCS_MERCURIAL,
100
+ "hg": VCS_MERCURIAL,
101
+ VCS_SUBVERSION.lower(): VCS_SUBVERSION,
102
+ "svn": VCS_SUBVERSION,
103
+ VCS_PERFORCE.lower(): VCS_PERFORCE,
104
+ "p4": VCS_PERFORCE,
105
+ VCS_GIT.lower(): VCS_GIT,
106
+ VCS_CVS.lower(): VCS_CVS,
107
+ }
108
+
109
+ # The result of parsing Subversion's [auto-props] setting.
110
+ svn_auto_props_map = None
111
+
112
+ def GetEmail(prompt):
113
+ """Prompts the user for their email address and returns it.
114
+
115
+ The last used email address is saved to a file and offered up as a suggestion
116
+ to the user. If the user presses enter without typing in anything the last
117
+ used email address is used. If the user enters a new address, it is saved
118
+ for next time we prompt.
119
+
120
+ """
121
+ last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
122
+ last_email = ""
123
+ if os.path.exists(last_email_file_name):
124
+ try:
125
+ last_email_file = open(last_email_file_name, "r")
126
+ last_email = last_email_file.readline().strip("\n")
127
+ last_email_file.close()
128
+ prompt += " [%s]" % last_email
129
+ except IOError, e:
130
+ pass
131
+ email = raw_input(prompt + ": ").strip()
132
+ if email:
133
+ try:
134
+ last_email_file = open(last_email_file_name, "w")
135
+ last_email_file.write(email)
136
+ last_email_file.close()
137
+ except IOError, e:
138
+ pass
139
+ else:
140
+ email = last_email
141
+ return email
142
+
143
+
144
+ def StatusUpdate(msg):
145
+ """Print a status message to stdout.
146
+
147
+ If 'verbosity' is greater than 0, print the message.
148
+
149
+ Args:
150
+ msg: The string to print.
151
+ """
152
+ if verbosity > 0:
153
+ print msg
154
+
155
+
156
+ def ErrorExit(msg):
157
+ """Print an error message to stderr and exit."""
158
+ print >>sys.stderr, msg
159
+ sys.exit(1)
160
+
161
+
162
+ class ClientLoginError(urllib2.HTTPError):
163
+ """Raised to indicate there was an error authenticating with ClientLogin."""
164
+
165
+ def __init__(self, url, code, msg, headers, args):
166
+ urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
167
+ self.args = args
168
+ self._reason = args["Error"]
169
+ self.info = args.get("Info", None)
170
+
171
+ @property
172
+ def reason(self):
173
+ # reason is a property on python 2.7 but a member variable on <=2.6.
174
+ # self.args is modified so it cannot be used as-is so save the value in
175
+ # self._reason.
176
+ return self._reason
177
+
178
+
179
+ class AbstractRpcServer(object):
180
+ """Provides a common interface for a simple RPC server."""
181
+
182
+ def __init__(self, host, auth_function, host_override=None, extra_headers={},
183
+ save_cookies=False, account_type=AUTH_ACCOUNT_TYPE):
184
+ """Creates a new AbstractRpcServer.
185
+
186
+ Args:
187
+ host: The host to send requests to.
188
+ auth_function: A function that takes no arguments and returns an
189
+ (email, password) tuple when called. Will be called if authentication
190
+ is required.
191
+ host_override: The host header to send to the server (defaults to host).
192
+ extra_headers: A dict of extra headers to append to every request.
193
+ save_cookies: If True, save the authentication cookies to local disk.
194
+ If False, use an in-memory cookiejar instead. Subclasses must
195
+ implement this functionality. Defaults to False.
196
+ account_type: Account type used for authentication. Defaults to
197
+ AUTH_ACCOUNT_TYPE.
198
+ """
199
+ self.host = host
200
+ if (not self.host.startswith("http://") and
201
+ not self.host.startswith("https://")):
202
+ self.host = "http://" + self.host
203
+ self.host_override = host_override
204
+ self.auth_function = auth_function
205
+ self.authenticated = False
206
+ self.extra_headers = extra_headers
207
+ self.save_cookies = save_cookies
208
+ self.account_type = account_type
209
+ self.opener = self._GetOpener()
210
+ if self.host_override:
211
+ logging.info("Server: %s; Host: %s", self.host, self.host_override)
212
+ else:
213
+ logging.info("Server: %s", self.host)
214
+
215
+ def _GetOpener(self):
216
+ """Returns an OpenerDirector for making HTTP requests.
217
+
218
+ Returns:
219
+ A urllib2.OpenerDirector object.
220
+ """
221
+ raise NotImplementedError()
222
+
223
+ def _CreateRequest(self, url, data=None):
224
+ """Creates a new urllib request."""
225
+ logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
226
+ req = urllib2.Request(url, data=data, headers={"Accept": "text/plain"})
227
+ if self.host_override:
228
+ req.add_header("Host", self.host_override)
229
+ for key, value in self.extra_headers.iteritems():
230
+ req.add_header(key, value)
231
+ return req
232
+
233
+ def _GetAuthToken(self, email, password):
234
+ """Uses ClientLogin to authenticate the user, returning an auth token.
235
+
236
+ Args:
237
+ email: The user's email address
238
+ password: The user's password
239
+
240
+ Raises:
241
+ ClientLoginError: If there was an error authenticating with ClientLogin.
242
+ HTTPError: If there was some other form of HTTP error.
243
+
244
+ Returns:
245
+ The authentication token returned by ClientLogin.
246
+ """
247
+ account_type = self.account_type
248
+ if self.host.endswith(".google.com"):
249
+ # Needed for use inside Google.
250
+ account_type = "HOSTED"
251
+ req = self._CreateRequest(
252
+ url="https://www.google.com/accounts/ClientLogin",
253
+ data=urllib.urlencode({
254
+ "Email": email,
255
+ "Passwd": password,
256
+ "service": "ah",
257
+ "source": "rietveld-codereview-upload",
258
+ "accountType": account_type,
259
+ }),
260
+ )
261
+ try:
262
+ response = self.opener.open(req)
263
+ response_body = response.read()
264
+ response_dict = dict(x.split("=")
265
+ for x in response_body.split("\n") if x)
266
+ return response_dict["Auth"]
267
+ except urllib2.HTTPError, e:
268
+ if e.code == 403:
269
+ body = e.read()
270
+ response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
271
+ raise ClientLoginError(req.get_full_url(), e.code, e.msg,
272
+ e.headers, response_dict)
273
+ else:
274
+ raise
275
+
276
+ def _GetAuthCookie(self, auth_token):
277
+ """Fetches authentication cookies for an authentication token.
278
+
279
+ Args:
280
+ auth_token: The authentication token returned by ClientLogin.
281
+
282
+ Raises:
283
+ HTTPError: If there was an error fetching the authentication cookies.
284
+ """
285
+ # This is a dummy value to allow us to identify when we're successful.
286
+ continue_location = "http://localhost/"
287
+ args = {"continue": continue_location, "auth": auth_token}
288
+ req = self._CreateRequest("%s/_ah/login?%s" %
289
+ (self.host, urllib.urlencode(args)))
290
+ try:
291
+ response = self.opener.open(req)
292
+ except urllib2.HTTPError, e:
293
+ response = e
294
+ if (response.code != 302 or
295
+ response.info()["location"] != continue_location):
296
+ raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
297
+ response.headers, response.fp)
298
+ self.authenticated = True
299
+
300
  def _Authenticate(self):
    """Authenticates the user.

    The authentication process works as follows:
     1) We get a username and password from the user
     2) We use ClientLogin to obtain an AUTH token for the user
        (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
     3) We pass the auth token to /_ah/login on the server to obtain an
        authentication cookie. If login was successful, it tries to redirect
        us to the URL we provided.

    If we attempt to access the upload API without first obtaining an
    authentication cookie, it returns a 401 response (or a 302) and
    directs us to authenticate ourselves with ClientLogin.
    """
    # Give the user up to three attempts at entering working credentials.
    for i in range(3):
      credentials = self.auth_function()
      try:
        auth_token = self._GetAuthToken(credentials[0], credentials[1])
      except ClientLoginError, e:
        print >>sys.stderr, ''
        # Map each documented ClientLogin failure reason to a human-readable
        # hint; unknown reasons are re-raised unchanged.
        if e.reason == "BadAuthentication":
          if e.info == "InvalidSecondFactor":
            print >>sys.stderr, (
                "Use an application-specific password instead "
                "of your regular account password.\n"
                "See http://www.google.com/"
                "support/accounts/bin/answer.py?answer=185833")
          else:
            print >>sys.stderr, "Invalid username or password."
        elif e.reason == "CaptchaRequired":
          print >>sys.stderr, (
              "Please go to\n"
              "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
              "and verify you are a human. Then try again.\n"
              "If you are using a Google Apps account the URL is:\n"
              "https://www.google.com/a/yourdomain.com/UnlockCaptcha")
        elif e.reason == "NotVerified":
          print >>sys.stderr, "Account not verified."
        elif e.reason == "TermsNotAgreed":
          print >>sys.stderr, "User has not agreed to TOS."
        elif e.reason == "AccountDeleted":
          print >>sys.stderr, "The user account has been deleted."
        elif e.reason == "AccountDisabled":
          print >>sys.stderr, "The user account has been disabled."
          # Retrying a disabled account cannot succeed: leave the loop
          # without authenticating.
          break
        elif e.reason == "ServiceDisabled":
          print >>sys.stderr, ("The user's access to the service has been "
                               "disabled.")
        elif e.reason == "ServiceUnavailable":
          print >>sys.stderr, "The service is not available; try again later."
        else:
          # Unknown error.
          raise
        print >>sys.stderr, ''
        continue
      # Token obtained: exchange it for the auth cookie and stop retrying.
      self._GetAuthCookie(auth_token)
      return
358
+
359
  def Send(self, request_path, payload=None,
           content_type="application/octet-stream",
           timeout=None,
           extra_headers=None,
           **kwargs):
    """Sends an RPC and returns the response.

    Args:
      request_path: The path to send the request to, eg /api/appversion/create.
      payload: The body of the request, or None to send an empty request.
      content_type: The Content-Type header to use.
      timeout: timeout in seconds; default None i.e. no timeout.
        (Note: for large requests on OS X, the timeout doesn't work right.)
      extra_headers: Dict containing additional HTTP headers that should be
        included in the request (string header names mapped to their values),
        or None to not include any additional headers.
      kwargs: Any keyword arguments are converted into query string parameters.

    Returns:
      The response body, as a string.
    """
    # TODO: Don't require authentication.  Let the server say
    # whether it is necessary.
    if not self.authenticated:
      self._Authenticate()

    # The timeout is applied via the process-wide socket default; the
    # previous value is restored in the finally block below.
    old_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
      tries = 0
      while True:
        tries += 1
        args = dict(kwargs)
        url = "%s%s" % (self.host, request_path)
        if args:
          url += "?" + urllib.urlencode(args)
        req = self._CreateRequest(url=url, data=payload)
        req.add_header("Content-Type", content_type)
        if extra_headers:
          for header, value in extra_headers.items():
            req.add_header(header, value)
        try:
          f = self.opener.open(req)
          response = f.read()
          f.close()
          return response
        except urllib2.HTTPError, e:
          # Failures on the first three tries are handled below; the fourth
          # failure propagates.
          if tries > 3:
            raise
          elif e.code == 401 or e.code == 302:
            # Unauthenticated (or redirected to login): re-authenticate and
            # retry the request.
            self._Authenticate()
          elif e.code == 301:
            # Handle permanent redirect manually.
            url = e.info()["location"]
            url_loc = urlparse.urlparse(url)
            self.host = '%s://%s' % (url_loc[0], url_loc[1])
          elif e.code >= 500:
            ErrorExit(e.read())
          else:
            raise
    finally:
      socket.setdefaulttimeout(old_timeout)
421
+
422
+
423
class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""

  def _Authenticate(self):
    """Save the cookie jar after authentication."""
    super(HttpRpcServer, self)._Authenticate()
    if self.save_cookies:
      StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
      self.cookie_jar.save()

  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    # Deliberately no HTTPRedirectHandler: redirects surface as HTTPError
    # responses that Send()/_GetAuthCookie() inspect directly.
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          # A loadable jar means a previous run authenticated; skip login.
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
        os.close(fd)
      # Always chmod the cookie file; it grants account access, so keep it
      # owner-readable only.
      os.chmod(self.cookie_file, 0600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
469
+
470
+
471
class CondensedHelpFormatter(optparse.IndentedHelpFormatter):
  """Help formatter that reclaims horizontal space.

  Group options are rendered one indent level shallower than optparse's
  default, and an option's short/long spellings are collapsed so the
  metavar appears only once: '-o ARG, --opt=ARG' becomes '-o --opt ARG'.
  """

  def format_heading(self, heading):
    """Render a group heading flush with no extra indentation."""
    return "%s:\n" % heading

  def format_option(self, option):
    """Render one option entry, temporarily dropping an indent level."""
    self.dedent()
    rendered = optparse.HelpFormatter.format_option(self, option)
    self.indent()
    return rendered

  def format_option_strings(self, option):
    """Collapse '-o ARG, --opt ARG' into '-o --opt ARG'."""
    # Make the base formatter emit '--opt ARG' rather than '--opt=ARG'.
    self.set_long_opt_delimiter(" ")
    rendered = optparse.HelpFormatter.format_option_strings(self, option)
    variants = rendered.split(", ")
    if len(variants) <= 1:
      return rendered
    if option.takes_value():
      # Keep the METAVAR only on the last variant.
      variants = [v.split()[0] for v in variants[:-1]] + variants[-1:]
    return " ".join(variants)
495
+
496
+
497
# Global command-line parser for upload.py.  Option groups below mirror the
# phases of an upload: logging, talking to the review server, describing the
# issue, shaping the patch, and Perforce specifics.
parser = optparse.OptionParser(
    usage="%prog [options] [-- diff_options] [path...]",
    add_help_option=False,
    formatter=CondensedHelpFormatter()
)
parser.add_option("-h", "--help", action="store_true",
                  help="Show this help message and exit.")
parser.add_option("-y", "--assume_yes", action="store_true",
                  dest="assume_yes", default=False,
                  help="Assume that the answer to yes/no questions is 'yes'.")
# Logging
group = parser.add_option_group("Logging options")
group.add_option("-q", "--quiet", action="store_const", const=0,
                 dest="verbose", help="Print errors only.")
group.add_option("-v", "--verbose", action="store_const", const=2,
                 dest="verbose", default=1,
                 help="Print info level logs.")
group.add_option("--noisy", action="store_const", const=3,
                 dest="verbose", help="Print all logs.")
group.add_option("--print_diffs", dest="print_diffs", action="store_true",
                 help="Print full diffs.")
# Review server
group = parser.add_option_group("Review server options")
group.add_option("-s", "--server", action="store", dest="server",
                 default=DEFAULT_REVIEW_SERVER,
                 metavar="SERVER",
                 help=("The server to upload to. The format is host[:port]. "
                       "Defaults to '%default'."))
group.add_option("-e", "--email", action="store", dest="email",
                 metavar="EMAIL", default=None,
                 help="The username to use. Will prompt if omitted.")
group.add_option("-H", "--host", action="store", dest="host",
                 metavar="HOST", default=None,
                 help="Overrides the Host header sent with all RPCs.")
group.add_option("--no_cookies", action="store_false",
                 dest="save_cookies", default=True,
                 help="Do not save authentication cookies to local disk.")
group.add_option("--account_type", action="store", dest="account_type",
                 metavar="TYPE", default=AUTH_ACCOUNT_TYPE,
                 choices=["GOOGLE", "HOSTED"],
                 help=("Override the default account type "
                       "(defaults to '%default', "
                       "valid choices are 'GOOGLE' and 'HOSTED')."))
# Issue
group = parser.add_option_group("Issue options")
group.add_option("-t", "--title", action="store", dest="title",
                 help="New issue subject or new patch set title")
group.add_option("-m", "--message", action="store", dest="message",
                 default=None,
                 help="New issue description or new patch set message")
group.add_option("-F", "--file", action="store", dest="file",
                 default=None, help="Read the message above from file.")
group.add_option("-r", "--reviewers", action="store", dest="reviewers",
                 metavar="REVIEWERS", default=None,
                 help="Add reviewers (comma separated email addresses).")
group.add_option("--cc", action="store", dest="cc",
                 metavar="CC", default=None,
                 help="Add CC (comma separated email addresses).")
group.add_option("--private", action="store_true", dest="private",
                 default=False,
                 help="Make the issue restricted to reviewers and those CCed")
# Upload options
group = parser.add_option_group("Patch options")
group.add_option("-i", "--issue", type="int", action="store",
                 metavar="ISSUE", default=None,
                 help="Issue number to which to add. Defaults to new issue.")
group.add_option("--base_url", action="store", dest="base_url", default=None,
                 help="Base URL path for files (listed as \"Base URL\" when "
                      "viewing issue). If omitted, will be guessed automatically "
                      "for SVN repos and left blank for others.")
group.add_option("--download_base", action="store_true",
                 dest="download_base", default=False,
                 help="Base files will be downloaded by the server "
                      "(side-by-side diffs may not work on files with CRs).")
group.add_option("--rev", action="store", dest="revision",
                 metavar="REV", default=None,
                 help="Base revision/branch/tree to diff against. Use "
                      "rev1:rev2 range to review already committed changeset.")
group.add_option("--send_mail", action="store_true",
                 dest="send_mail", default=False,
                 help="Send notification email to reviewers.")
group.add_option("-p", "--send_patch", action="store_true",
                 dest="send_patch", default=False,
                 help="Same as --send_mail, but include diff as an "
                      "attachment, and prepend email subject with 'PATCH:'.")
group.add_option("--vcs", action="store", dest="vcs",
                 metavar="VCS", default=None,
                 help=("Version control system (optional, usually upload.py "
                       "already guesses the right VCS)."))
group.add_option("--emulate_svn_auto_props", action="store_true",
                 dest="emulate_svn_auto_props", default=False,
                 help=("Emulate Subversion's auto properties feature."))
# Perforce-specific (command-line values override P4 environment variables)
group = parser.add_option_group("Perforce-specific options "
                                "(overrides P4 environment variables)")
group.add_option("--p4_port", action="store", dest="p4_port",
                 metavar="P4_PORT", default=None,
                 help=("Perforce server and port (optional)"))
group.add_option("--p4_changelist", action="store", dest="p4_changelist",
                 metavar="P4_CHANGELIST", default=None,
                 help=("Perforce changelist id"))
group.add_option("--p4_client", action="store", dest="p4_client",
                 metavar="P4_CLIENT", default=None,
                 help=("Perforce client/workspace"))
group.add_option("--p4_user", action="store", dest="p4_user",
                 metavar="P4_USER", default=None,
                 help=("Perforce user"))
604
+
605
+
606
class KeyringCreds(object):
  # Credential source whose GetUserCredentials bound method is handed to the
  # RPC server as its auth_function.
  def __init__(self, server, host, email):
    self.server = server  # review server, only used in the email prompt
    self.host = host      # key under which the password is stored in the keyring
    self.email = email    # preset email, or None to prompt on first use
    self.accounts_seen = set()  # emails whose keyring entry was already tried

  def GetUserCredentials(self):
    """Prompts the user for a username and password.

    Only use keyring on the initial call. If the keyring contains the wrong
    password, we want to give the user a chance to enter another one.
    """
    # `keyring` is a module-level name that may be set to None below when it
    # proves unusable, hence the global declaration.
    global keyring
    email = self.email
    if email is None:
      email = GetEmail("Email (login for uploading to %s)" % self.server)
    password = None
    if keyring and not email in self.accounts_seen:
      try:
        password = keyring.get_password(self.host, email)
      except:
        # Sadly, we have to trap all errors here as
        # gnomekeyring.IOError inherits from object. :/
        print "Failed to get password from keyring"
        keyring = None
    if password is not None:
      print "Using password from system keyring."
      self.accounts_seen.add(email)
    else:
      password = getpass.getpass("Password for %s: " % email)
      if keyring:
        answer = raw_input("Store password in system keyring?(y/N) ").strip()
        if answer == "y":
          keyring.set_password(self.host, email, password)
          self.accounts_seen.add(email)
    return (email, password)
645
+
646
+
647
def GetRpcServer(server, email=None, host_override=None, save_cookies=True,
                 account_type=AUTH_ACCOUNT_TYPE):
  """Returns an instance of an AbstractRpcServer.

  Args:
    server: String containing the review server URL.
    email: String containing user's email address.
    host_override: If not None, string containing an alternate hostname to use
      in the host header.
    save_cookies: Whether authentication cookies should be saved to disk.
    account_type: Account type for authentication, either 'GOOGLE'
      or 'HOSTED'. Defaults to AUTH_ACCOUNT_TYPE.

  Returns:
    A new HttpRpcServer, on which RPC calls can be made.
  """

  # If this is the dev_appserver, use fake authentication.
  host = (host_override or server).lower()
  if re.match(r'(http://)?localhost([:/]|$)', host):
    if email is None:
      email = "test@example.com"
      logging.info("Using debug user %s. Override with --email" % email)
    server = HttpRpcServer(
        server,
        # dev_appserver accepts any password; the cookie below does the work.
        lambda: (email, "password"),
        host_override=host_override,
        extra_headers={"Cookie":
                       'dev_appserver_login="%s:False"' % email},
        save_cookies=save_cookies,
        account_type=account_type)
    # Don't try to talk to ClientLogin.
    server.authenticated = True
    return server

  # Bug fix: propagate account_type here as well; previously it was dropped,
  # so --account_type had no effect for non-localhost servers.
  return HttpRpcServer(server,
                       KeyringCreds(server, host, email).GetUserCredentials,
                       host_override=host_override,
                       save_cookies=save_cookies,
                       account_type=account_type)
686
+
687
+
688
def EncodeMultipartFormData(fields, files):
  """Build a multipart/form-data request body.

  Args:
    fields: A sequence of (name, value) elements for regular form fields.
    files: A sequence of (name, filename, value) elements for data to be
      uploaded as files.
  Returns:
    (content_type, body) ready for httplib.HTTP instance.

  Source:
    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
  """
  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
  CRLF = '\r\n'
  parts = []
  add = parts.append
  for (name, value) in fields:
    add('--' + BOUNDARY)
    add('Content-Disposition: form-data; name="%s"' % name)
    add('')
    # Payloads must be byte strings before joining.
    if isinstance(value, unicode):
      value = value.encode('utf-8')
    add(value)
  for (name, filename, value) in files:
    add('--' + BOUNDARY)
    add('Content-Disposition: form-data; name="%s"; filename="%s"' %
        (name, filename))
    add('Content-Type: %s' % GetContentType(filename))
    add('')
    if isinstance(value, unicode):
      value = value.encode('utf-8')
    add(value)
  add('--' + BOUNDARY + '--')
  add('')
  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
  return content_type, CRLF.join(parts)
725
+
726
+
727
def GetContentType(filename):
  """Guess the MIME content-type from *filename*, defaulting to octet-stream."""
  mimetype, _unused_encoding = mimetypes.guess_type(filename)
  return mimetype if mimetype else 'application/octet-stream'
730
+
731
+
732
# Use a shell for subcommands on Windows to get a PATH search.
# (shell=True lets cmd.exe resolve bare command names like "svn".)
use_shell = sys.platform.startswith("win")
734
+
735
def RunShellWithReturnCodeAndStderr(command, print_output=False,
                                    universal_newlines=True,
                                    env=os.environ):
  """Executes a command and returns the output from stdout, stderr and the return code.

  Args:
    command: Command to execute.
    print_output: If True, the output is printed to stdout.
      If False, both stdout and stderr are ignored.
    universal_newlines: Use universal_newlines flag (default: True).
    env: Environment for the child process (copied before modification).

  Returns:
    Tuple (stdout, stderr, return code)
  """
  logging.info("Running %s", command)
  env = env.copy()
  # Force untranslated (English) tool messages so callers can pattern-match
  # stderr, e.g. the svn error parsing in SubversionVCS.GetStatus.
  env['LC_MESSAGES'] = 'C'
  p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                       shell=use_shell, universal_newlines=universal_newlines,
                       env=env)
  if print_output:
    # Echo stdout line by line while also collecting it for the caller.
    output_array = []
    while True:
      line = p.stdout.readline()
      if not line:
        break
      print line.strip("\n")
      output_array.append(line)
    output = "".join(output_array)
  else:
    output = p.stdout.read()
  p.wait()
  # NOTE(review): stderr is drained only after stdout is fully read; a child
  # that fills the stderr pipe buffer first could block -- confirm acceptable.
  errout = p.stderr.read()
  if print_output and errout:
    print >>sys.stderr, errout
  p.stdout.close()
  p.stderr.close()
  return output, errout, p.returncode
773
+
774
def RunShellWithReturnCode(command, print_output=False,
                           universal_newlines=True,
                           env=os.environ):
  """Executes a command and returns the output from stdout and the return code."""
  # Thin wrapper that simply discards the stderr component.
  stdout, _unused_stderr, exit_code = RunShellWithReturnCodeAndStderr(
      command, print_output, universal_newlines, env)
  return stdout, exit_code
781
+
782
def RunShell(command, silent_ok=False, universal_newlines=True,
             print_output=False, env=os.environ):
  """Runs command and returns its stdout, aborting via ErrorExit on failure.

  Unless silent_ok is true, an empty output is also treated as an error.
  """
  output, exit_code = RunShellWithReturnCode(command, print_output,
                                             universal_newlines, env)
  if exit_code:
    ErrorExit("Got error status from %s:\n%s" % (command, output))
  if not output and not silent_ok:
    ErrorExit("No output from %s" % command)
  return output
791
+
792
+
793
+ class VersionControlSystem(object):
794
+ """Abstract base class providing an interface to the VCS."""
795
+
796
+ def __init__(self, options):
797
+ """Constructor.
798
+
799
+ Args:
800
+ options: Command line options.
801
+ """
802
+ self.options = options
803
+
804
+ def GetGUID(self):
805
+ """Return string to distinguish the repository from others, for example to
806
+ query all opened review issues for it"""
807
+ raise NotImplementedError(
808
+ "abstract method -- subclass %s must override" % self.__class__)
809
+
810
+ def PostProcessDiff(self, diff):
811
+ """Return the diff with any special post processing this VCS needs, e.g.
812
+ to include an svn-style "Index:"."""
813
+ return diff
814
+
815
+ def GenerateDiff(self, args):
816
+ """Return the current diff as a string.
817
+
818
+ Args:
819
+ args: Extra arguments to pass to the diff command.
820
+ """
821
+ raise NotImplementedError(
822
+ "abstract method -- subclass %s must override" % self.__class__)
823
+
824
+ def GetUnknownFiles(self):
825
+ """Return a list of files unknown to the VCS."""
826
+ raise NotImplementedError(
827
+ "abstract method -- subclass %s must override" % self.__class__)
828
+
829
+ def CheckForUnknownFiles(self):
830
+ """Show an "are you sure?" prompt if there are unknown files."""
831
+ unknown_files = self.GetUnknownFiles()
832
+ if unknown_files:
833
+ print "The following files are not added to version control:"
834
+ for line in unknown_files:
835
+ print line
836
+ prompt = "Are you sure to continue?(y/N) "
837
+ answer = raw_input(prompt).strip()
838
+ if answer != "y":
839
+ ErrorExit("User aborted")
840
+
841
+ def GetBaseFile(self, filename):
842
+ """Get the content of the upstream version of a file.
843
+
844
+ Returns:
845
+ A tuple (base_content, new_content, is_binary, status)
846
+ base_content: The contents of the base file.
847
+ new_content: For text files, this is empty. For binary files, this is
848
+ the contents of the new file, since the diff output won't contain
849
+ information to reconstruct the current file.
850
+ is_binary: True iff the file is binary.
851
+ status: The status of the file.
852
+ """
853
+
854
+ raise NotImplementedError(
855
+ "abstract method -- subclass %s must override" % self.__class__)
856
+
857
+
858
+ def GetBaseFiles(self, diff):
859
+ """Helper that calls GetBase file for each file in the patch.
860
+
861
+ Returns:
862
+ A dictionary that maps from filename to GetBaseFile's tuple. Filenames
863
+ are retrieved based on lines that start with "Index:" or
864
+ "Property changes on:".
865
+ """
866
+ files = {}
867
+ for line in diff.splitlines(True):
868
+ if line.startswith('Index:') or line.startswith('Property changes on:'):
869
+ unused, filename = line.split(':', 1)
870
+ # On Windows if a file has property changes its filename uses '\'
871
+ # instead of '/'.
872
+ filename = filename.strip().replace('\\', '/')
873
+ files[filename] = self.GetBaseFile(filename)
874
+ return files
875
+
876
+
877
+ def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
878
+ files):
879
+ """Uploads the base files (and if necessary, the current ones as well)."""
880
+
881
+ def UploadFile(filename, file_id, content, is_binary, status, is_base):
882
+ """Uploads a file to the server."""
883
+ file_too_large = False
884
+ if is_base:
885
+ type = "base"
886
+ else:
887
+ type = "current"
888
+ if len(content) > MAX_UPLOAD_SIZE:
889
+ print ("Not uploading the %s file for %s because it's too large." %
890
+ (type, filename))
891
+ file_too_large = True
892
+ content = ""
893
+ checksum = md5(content).hexdigest()
894
+ if options.verbose > 0 and not file_too_large:
895
+ print "Uploading %s file for %s" % (type, filename)
896
+ url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
897
+ form_fields = [("filename", filename),
898
+ ("status", status),
899
+ ("checksum", checksum),
900
+ ("is_binary", str(is_binary)),
901
+ ("is_current", str(not is_base)),
902
+ ]
903
+ if file_too_large:
904
+ form_fields.append(("file_too_large", "1"))
905
+ if options.email:
906
+ form_fields.append(("user", options.email))
907
+ ctype, body = EncodeMultipartFormData(form_fields,
908
+ [("data", filename, content)])
909
+ response_body = rpc_server.Send(url, body,
910
+ content_type=ctype)
911
+ if not response_body.startswith("OK"):
912
+ StatusUpdate(" --> %s" % response_body)
913
+ sys.exit(1)
914
+
915
+ patches = dict()
916
+ [patches.setdefault(v, k) for k, v in patch_list]
917
+ for filename in patches.keys():
918
+ base_content, new_content, is_binary, status = files[filename]
919
+ file_id_str = patches.get(filename)
920
+ if file_id_str.find("nobase") != -1:
921
+ base_content = None
922
+ file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
923
+ file_id = int(file_id_str)
924
+ if base_content != None:
925
+ UploadFile(filename, file_id, base_content, is_binary, status, True)
926
+ if new_content != None:
927
+ UploadFile(filename, file_id, new_content, is_binary, status, False)
928
+
929
+ def IsImage(self, filename):
930
+ """Returns true if the filename has an image extension."""
931
+ mimetype = mimetypes.guess_type(filename)[0]
932
+ if not mimetype:
933
+ return False
934
+ return mimetype.startswith("image/")
935
+
936
+ def IsBinaryData(self, data):
937
+ """Returns true if data contains a null byte."""
938
+ # Derived from how Mercurial's heuristic, see
939
+ # http://selenic.com/hg/file/848a6658069e/mercurial/util.py#l229
940
+ return bool(data and "\0" in data)
941
+
942
+
943
+ class SubversionVCS(VersionControlSystem):
944
+ """Implementation of the VersionControlSystem interface for Subversion."""
945
+
946
+ def __init__(self, options):
947
+ super(SubversionVCS, self).__init__(options)
948
+ if self.options.revision:
949
+ match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
950
+ if not match:
951
+ ErrorExit("Invalid Subversion revision %s." % self.options.revision)
952
+ self.rev_start = match.group(1)
953
+ self.rev_end = match.group(3)
954
+ else:
955
+ self.rev_start = self.rev_end = None
956
+ # Cache output from "svn list -r REVNO dirname".
957
+ # Keys: dirname, Values: 2-tuple (ouput for start rev and end rev).
958
+ self.svnls_cache = {}
959
+ # Base URL is required to fetch files deleted in an older revision.
960
+ # Result is cached to not guess it over and over again in GetBaseFile().
961
+ required = self.options.download_base or self.options.revision is not None
962
+ self.svn_base = self._GuessBase(required)
963
+
964
  def GetGUID(self):
    """Identify the repository by its Subversion UUID (from "svn info")."""
    return self._GetInfo("Repository UUID")
966
+
967
  def GuessBase(self, required):
    """Wrapper for _GuessBase.

    Returns the base URL already computed (and cached) by __init__; the
    `required` argument is ignored here -- it was honored at cache time.
    """
    return self.svn_base
970
+
971
  def _GuessBase(self, required):
    """Returns base URL for current diff.

    Args:
      required: If true, exits if the url can't be guessed, otherwise None is
        returned.
    """
    url = self._GetInfo("URL")
    if url:
      scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
      guess = ""
      # TODO(anatoli) - repository specific hacks should be handled by server
      # Rewrite known hosts to their public read-only HTTP equivalents.
      if netloc == "svn.python.org" and scheme == "svn+ssh":
        path = "projects" + path
        scheme = "http"
        guess = "Python "
      elif netloc.endswith(".googlecode.com"):
        scheme = "http"
        guess = "Google Code "
      # Ensure the base always ends with a slash so relative joins work.
      path = path + "/"
      base = urlparse.urlunparse((scheme, netloc, path, params,
                                  query, fragment))
      logging.info("Guessed %sbase = %s", guess, base)
      return base
    if required:
      ErrorExit("Can't find URL in output from svn info")
    return None
998
+
999
+ def _GetInfo(self, key):
1000
+ """Parses 'svn info' for current dir. Returns value for key or None"""
1001
+ for line in RunShell(["svn", "info"]).splitlines():
1002
+ if line.startswith(key + ": "):
1003
+ return line.split(":", 1)[1].strip()
1004
+
1005
+ def _EscapeFilename(self, filename):
1006
+ """Escapes filename for SVN commands."""
1007
+ if "@" in filename and not filename.endswith("@"):
1008
+ filename = "%s@" % filename
1009
+ return filename
1010
+
1011
+ def GenerateDiff(self, args):
1012
+ cmd = ["svn", "diff"]
1013
+ if self.options.revision:
1014
+ cmd += ["-r", self.options.revision]
1015
+ cmd.extend(args)
1016
+ data = RunShell(cmd)
1017
+ count = 0
1018
+ for line in data.splitlines():
1019
+ if line.startswith("Index:") or line.startswith("Property changes on:"):
1020
+ count += 1
1021
+ logging.info(line)
1022
+ if not count:
1023
+ ErrorExit("No valid patches found in output from svn diff")
1024
+ return data
1025
+
1026
  def _CollapseKeywords(self, content, keyword_str):
    """Collapses SVN keywords.

    Args:
      content: File text whose expanded "$Keyword: value$" fields should be
        collapsed back to their unexpanded "$Keyword$" form.
      keyword_str: Space-separated keyword names (presumably the file's
        svn:keywords property value -- confirm against the caller).
    """
    # svn cat translates keywords but svn diff doesn't. As a result of this
    # behavior patching.PatchChunks() fails with a chunk mismatch error.
    # This part was originally written by the Review Board development team
    # who had the same problem (http://reviews.review-board.org/r/276/).
    # Mapping of keywords to known aliases
    svn_keywords = {
      # Standard keywords
      'Date': ['Date', 'LastChangedDate'],
      'Revision': ['Revision', 'LastChangedRevision', 'Rev'],
      'Author': ['Author', 'LastChangedBy'],
      'HeadURL': ['HeadURL', 'URL'],
      'Id': ['Id'],

      # Aliases
      'LastChangedDate': ['LastChangedDate', 'Date'],
      'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
      'LastChangedBy': ['LastChangedBy', 'Author'],
      'URL': ['URL', 'HeadURL'],
    }

    def repl(m):
      # Fixed-length form "$Kw:: ... $" keeps the field width (pad with
      # spaces); the plain form collapses to "$Kw$".
      if m.group(2):
        return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
      return "$%s$" % m.group(1)
    # Expand each enabled keyword name into itself plus all its aliases.
    keywords = [keyword
                for name in keyword_str.split(" ")
                for keyword in svn_keywords.get(name, [])]
    return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)
1056
+
1057
+ def GetUnknownFiles(self):
1058
+ status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
1059
+ unknown_files = []
1060
+ for line in status.split("\n"):
1061
+ if line and line[0] == "?":
1062
+ unknown_files.append(line)
1063
+ return unknown_files
1064
+
1065
+ def ReadFile(self, filename):
1066
+ """Returns the contents of a file."""
1067
+ file = open(filename, 'rb')
1068
+ result = ""
1069
+ try:
1070
+ result = file.read()
1071
+ finally:
1072
+ file.close()
1073
+ return result
1074
+
1075
+ def GetStatus(self, filename):
1076
+ """Returns the status of a file."""
1077
+ if not self.options.revision:
1078
+ status = RunShell(["svn", "status", "--ignore-externals",
1079
+ self._EscapeFilename(filename)])
1080
+ if not status:
1081
+ ErrorExit("svn status returned no output for %s" % filename)
1082
+ status_lines = status.splitlines()
1083
+ # If file is in a cl, the output will begin with
1084
+ # "\n--- Changelist 'cl_name':\n". See
1085
+ # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
1086
+ if (len(status_lines) == 3 and
1087
+ not status_lines[0] and
1088
+ status_lines[1].startswith("--- Changelist")):
1089
+ status = status_lines[2]
1090
+ else:
1091
+ status = status_lines[0]
1092
+ # If we have a revision to diff against we need to run "svn list"
1093
+ # for the old and the new revision and compare the results to get
1094
+ # the correct status for a file.
1095
+ else:
1096
+ dirname, relfilename = os.path.split(filename)
1097
+ if dirname not in self.svnls_cache:
1098
+ cmd = ["svn", "list", "-r", self.rev_start,
1099
+ self._EscapeFilename(dirname) or "."]
1100
+ out, err, returncode = RunShellWithReturnCodeAndStderr(cmd)
1101
+ if returncode:
1102
+ # Directory might not yet exist at start revison
1103
+ # svn: Unable to find repository location for 'abc' in revision nnn
1104
+ if re.match('^svn: Unable to find repository location for .+ in revision \d+', err):
1105
+ old_files = ()
1106
+ else:
1107
+ ErrorExit("Failed to get status for %s:\n%s" % (filename, err))
1108
+ else:
1109
+ old_files = out.splitlines()
1110
+ args = ["svn", "list"]
1111
+ if self.rev_end:
1112
+ args += ["-r", self.rev_end]
1113
+ cmd = args + [self._EscapeFilename(dirname) or "."]
1114
+ out, returncode = RunShellWithReturnCode(cmd)
1115
+ if returncode:
1116
+ ErrorExit("Failed to run command %s" % cmd)
1117
+ self.svnls_cache[dirname] = (old_files, out.splitlines())
1118
+ old_files, new_files = self.svnls_cache[dirname]
1119
+ if relfilename in old_files and relfilename not in new_files:
1120
+ status = "D "
1121
+ elif relfilename in old_files and relfilename in new_files:
1122
+ status = "M "
1123
+ else:
1124
+ status = "A "
1125
+ return status
1126
+
1127
def GetBaseFile(self, filename):
  """Fetch the base (pre-change) content of filename from Subversion.

  Returns:
    A 4-tuple (base_content, new_content, is_binary, status[0:5]) where
    base_content is the file's content at the base revision (or "" / None),
    new_content is the working-copy content for binary adds/edits (None
    otherwise), is_binary is the svn:mime-type based binary test, and the
    last element is the first five columns of the svn status line.
  """
  status = self.GetStatus(filename)
  base_content = None
  new_content = None

  # If a file is copied its status will be "A  +", which signifies
  # "addition-with-history". See "svn st" for more information. We need to
  # upload the original file or else diff parsing will fail if the file was
  # edited.
  if status[0] == "A" and status[3] != "+":
    # We'll need to upload the new content if we're adding a binary file
    # since diff's output won't contain it.
    mimetype = RunShell(["svn", "propget", "svn:mime-type",
                         self._EscapeFilename(filename)], silent_ok=True)
    base_content = ""
    is_binary = bool(mimetype) and not mimetype.startswith("text/")
    if is_binary:
      new_content = self.ReadFile(filename)
  elif (status[0] in ("M", "D", "R") or
        (status[0] == "A" and status[3] == "+") or  # Copied file.
        (status[0] == " " and status[1] == "M")):  # Property change.
    args = []
    if self.options.revision:
      # filename must not be escaped here: the "@REV" suffix appended to the
      # URL below already disambiguates any special characters.
      url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
    else:
      # Don't change filename, it's needed later.
      url = filename
      args += ["-r", "BASE"]
    cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
    mimetype, returncode = RunShellWithReturnCode(cmd)
    if returncode:
      # File does not exist in the requested revision.
      # Reset mimetype, it contains an error message.
      mimetype = ""
    else:
      mimetype = mimetype.strip()
    get_base = False
    # this test for binary is exactly the test prescribed by the
    # official SVN docs at
    # http://subversion.apache.org/faq.html#binary-files
    is_binary = (bool(mimetype) and
                 not mimetype.startswith("text/") and
                 mimetype not in ("image/x-xbitmap", "image/x-xpixmap"))
    if status[0] == " ":
      # Empty base content just to force an upload.
      base_content = ""
    elif is_binary:
      get_base = True
      if status[0] == "M":
        if not self.rev_end:
          new_content = self.ReadFile(filename)
        else:
          url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
          new_content = RunShell(["svn", "cat", url],
                                 universal_newlines=True, silent_ok=True)
    else:
      get_base = True

    if get_base:
      # Binary content must not have its newlines translated.
      if is_binary:
        universal_newlines = False
      else:
        universal_newlines = True
      if self.rev_start:
        # "svn cat -r REV delete_file.txt" doesn't work. cat requires
        # the full URL with "@REV" appended instead of using "-r" option.
        url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
        base_content = RunShell(["svn", "cat", url],
                                universal_newlines=universal_newlines,
                                silent_ok=True)
      else:
        base_content, ret_code = RunShellWithReturnCode(
            ["svn", "cat", self._EscapeFilename(filename)],
            universal_newlines=universal_newlines)
        if ret_code and status[0] == "R":
          # It's a replaced file without local history (see issue208).
          # The base file needs to be fetched from the server.
          url = "%s/%s" % (self.svn_base, filename)
          base_content = RunShell(["svn", "cat", url],
                                  universal_newlines=universal_newlines,
                                  silent_ok=True)
        elif ret_code:
          ErrorExit("Got error status from 'svn cat %s'" % filename)
      if not is_binary:
        # Collapse svn:keywords (e.g. $Id$) so the uploaded base matches
        # what the diff was computed against.
        args = []
        if self.rev_start:
          url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
        else:
          url = filename
          args += ["-r", "BASE"]
        cmd = ["svn"] + args + ["propget", "svn:keywords", url]
        keywords, returncode = RunShellWithReturnCode(cmd)
        if keywords and not returncode:
          base_content = self._CollapseKeywords(base_content, keywords)
  else:
    StatusUpdate("svn status returned unexpected output: %s" % status)
    sys.exit(1)
  return base_content, new_content, is_binary, status[0:5]
1226
+
1227
+
1228
class GitVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Git."""

  def __init__(self, options):
    super(GitVCS, self).__init__(options)
    # Map of filename -> (hash before, hash after) of base file.
    # Hashes for "no such file" are represented as None.
    self.hashes = {}
    # Map of new filename -> old filename for renames.
    self.renames = {}

  def GetGUID(self):
    """Returns a per-repository unique id: the hash of the first root commit."""
    revlist = RunShell("git rev-list --parents HEAD".split()).splitlines()
    # M-A: Return the 1st root hash, there could be multiple when a
    # subtree is merged. In that case, more analysis would need to
    # be done to figure out which HEAD is the 'most representative'.
    for r in revlist:
      # A root commit has no parents, so its rev-list line is just the hash
      # with no space-separated parent hashes after it.
      if ' ' not in r:
        return r

  def PostProcessDiff(self, gitdiff):
    """Converts the diff output to include an svn-style "Index:" line as well
    as record the hashes of the files, so we can upload them along with our
    diff."""
    # Special used by git to indicate "no such content".
    NULL_HASH = "0"*40

    def IsFileNew(filename):
      # "Before" hash of None means the file had no prior content.
      return filename in self.hashes and self.hashes[filename][0] is None

    def AddSubversionPropertyChange(filename):
      """Add svn's property change information into the patch if given file is
      new file.

      We use Subversion's auto-props setting to retrieve its property.
      See http://svnbook.red-bean.com/en/1.1/ch07.html#svn-ch-7-sect-1.3.2 for
      Subversion's [auto-props] setting.
      """
      if self.options.emulate_svn_auto_props and IsFileNew(filename):
        svnprops = GetSubversionPropertyChanges(filename)
        if svnprops:
          svndiff.append("\n" + svnprops + "\n")

    svndiff = []
    filecount = 0
    filename = None
    for line in gitdiff.splitlines():
      match = re.match(r"diff --git a/(.*) b/(.*)$", line)
      if match:
        # Add auto property here for previously seen file.
        if filename is not None:
          AddSubversionPropertyChange(filename)
        filecount += 1
        # Intentionally use the "after" filename so we can show renames.
        filename = match.group(2)
        svndiff.append("Index: %s\n" % filename)
        if match.group(1) != match.group(2):
          self.renames[match.group(2)] = match.group(1)
      else:
        # The "index" line in a git diff looks like this (long hashes elided):
        # index 82c0d44..b2cee3f 100755
        # We want to save the left hash, as that identifies the base file.
        match = re.match(r"index (\w+)\.\.(\w+)", line)
        if match:
          before, after = (match.group(1), match.group(2))
          if before == NULL_HASH:
            before = None
          if after == NULL_HASH:
            after = None
          self.hashes[filename] = (before, after)
      # Every original diff line is kept; "Index:" lines are inserted above.
      svndiff.append(line + "\n")
    if not filecount:
      ErrorExit("No valid patches found in output from git diff")
    # Add auto property for the last seen file.
    assert filename is not None
    AddSubversionPropertyChange(filename)
    return "".join(svndiff)

  def GenerateDiff(self, extra_args):
    """Returns the git diff for the requested revision(s) plus extra_args."""
    extra_args = extra_args[:]
    if self.options.revision:
      if ":" in self.options.revision:
        extra_args = self.options.revision.split(":", 1) + extra_args
      else:
        extra_args = [self.options.revision] + extra_args

    # --no-ext-diff is broken in some versions of Git, so try to work around
    # this by overriding the environment (but there is still a problem if the
    # git config key "diff.external" is used).
    env = os.environ.copy()
    if "GIT_EXTERNAL_DIFF" in env:
      del env["GIT_EXTERNAL_DIFF"]
    # -M/-C will not print the diff for the deleted file when a file is renamed.
    # This is confusing because the original file will not be shown on the
    # review when a file is renamed. So, get a diff with ONLY deletes, then
    # append a diff (with rename detection), without deletes.
    cmd = [
        "git", "diff", "--no-color", "--no-ext-diff", "--full-index",
        "--ignore-submodules",
    ]
    diff = RunShell(
        cmd + ["--no-renames", "--diff-filter=D"] + extra_args,
        env=env, silent_ok=True)
    diff += RunShell(
        cmd + ["--find-copies-harder", "-l100000", "--diff-filter=AMCRT"]
        + extra_args,
        env=env, silent_ok=True)

    # The CL could be only file deletion or not. So accept silent diff for both
    # commands then check for an empty diff manually.
    if not diff:
      ErrorExit("No output from %s" % (cmd + extra_args))
    return diff

  def GetUnknownFiles(self):
    """Returns untracked (non-ignored) files as reported by git ls-files."""
    status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
                      silent_ok=True)
    return status.splitlines()

  def GetFileContent(self, file_hash, is_binary):
    """Returns the content of a file identified by its git hash."""
    data, retcode = RunShellWithReturnCode(["git", "show", file_hash],
                                           universal_newlines=not is_binary)
    if retcode:
      ErrorExit("Got error status from 'git show %s'" % file_hash)
    return data

  def GetBaseFile(self, filename):
    """Returns (base_content, new_content, is_binary, status) for filename.

    Relies on the hashes recorded by PostProcessDiff; must be called after it.
    """
    hash_before, hash_after = self.hashes.get(filename, (None,None))
    base_content = None
    new_content = None
    status = None

    if filename in self.renames:
      status = "A +"  # Match svn attribute name for renames.
      if filename not in self.hashes:
        # If a rename doesn't change the content, we never get a hash.
        base_content = RunShell(
            ["git", "show", "HEAD:" + filename], silent_ok=True)
    elif not hash_before:
      status = "A"
      base_content = ""
    elif not hash_after:
      status = "D"
    else:
      status = "M"

    # NOTE(review): base_content may still be None here (e.g. status "M");
    # assumes IsBinaryData handles None — confirm in the base class.
    is_binary = self.IsBinaryData(base_content)
    is_image = self.IsImage(filename)

    # Grab the before/after content if we need it.
    # Grab the base content if we don't have it already.
    if base_content is None and hash_before:
      base_content = self.GetFileContent(hash_before, is_binary)
    # Only include the "after" file if it's an image; otherwise it
    # it is reconstructed from the diff.
    if is_image and hash_after:
      new_content = self.GetFileContent(hash_after, is_binary)

    return (base_content, new_content, is_binary, status)
1388
+
1389
+
1390
class CVSVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for CVS."""

  def __init__(self, options):
    super(CVSVCS, self).__init__(options)

  def GetGUID(self):
    """For now we don't know how to get repository ID for CVS"""
    return

  def GetOriginalContent_(self, filename):
    """Returns the repository content of filename with newlines normalized.

    Runs "cvs up" first so the working file reflects the repository version.
    """
    RunShell(["cvs", "up", filename], silent_ok=True)
    # TODO need detect file content encoding
    # Explicitly close the file handle instead of relying on GC.
    f = open(filename)
    try:
      content = f.read()
    finally:
      f.close()
    return content.replace("\r\n", "\n")

  def GetBaseFile(self, filename):
    """Returns (base_content, new_content, is_binary, status) for filename."""
    base_content = None
    new_content = None
    status = "A"

    output, retcode = RunShellWithReturnCode(["cvs", "status", filename])
    if retcode:
      ErrorExit("Got error status from 'cvs status %s'" % filename)

    # BUG FIX: str.find returns -1 (which is truthy) when the substring is
    # absent, so the original "elif output.find(...)" branches matched almost
    # any output, mis-classifying "Needs Checkout" files as locally added.
    # Every branch must compare against -1 explicitly.
    if output.find("Status: Locally Modified") != -1:
      status = "M"
      # Temporarily move the working file aside so "cvs up" restores the
      # pristine base content, then put the local edits back.
      temp_filename = "%s.tmp123" % filename
      os.rename(filename, temp_filename)
      base_content = self.GetOriginalContent_(filename)
      os.rename(temp_filename, filename)
    elif output.find("Status: Locally Added") != -1:
      status = "A"
      base_content = ""
    elif output.find("Status: Needs Checkout") != -1:
      status = "D"
      base_content = self.GetOriginalContent_(filename)

    return (base_content, new_content, self.IsBinaryData(base_content), status)

  def GenerateDiff(self, extra_args):
    """Returns the "cvs diff" output, erroring out if no Index: lines found."""
    cmd = ["cvs", "diff", "-u", "-N"]
    if self.options.revision:
      cmd += ["-r", self.options.revision]

    cmd.extend(extra_args)
    data, retcode = RunShellWithReturnCode(cmd)
    count = 0
    # cvs diff exits 1 when there are differences, so 0 and 1 are both OK.
    if retcode in [0, 1]:
      for line in data.splitlines():
        if line.startswith("Index:"):
          count += 1
          logging.info(line)

    if not count:
      ErrorExit("No valid patches found in output from cvs diff")

    return data

  def GetUnknownFiles(self):
    """Returns the files cvs reports as unknown ("?" lines from cvs diff)."""
    data, retcode = RunShellWithReturnCode(["cvs", "diff"])
    if retcode not in [0, 1]:
      ErrorExit("Got error status from 'cvs diff':\n%s" % (data,))
    unknown_files = []
    for line in data.split("\n"):
      if line and line[0] == "?":
        unknown_files.append(line)
    return unknown_files
1458
+
1459
class MercurialVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Mercurial."""

  def __init__(self, options, repo_dir):
    super(MercurialVCS, self).__init__(options)
    # Absolute path to repository (we can be in a subdir)
    self.repo_dir = os.path.normpath(repo_dir)
    # Compute the subdir
    cwd = os.path.normpath(os.getcwd())
    assert cwd.startswith(self.repo_dir)
    self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
    if self.options.revision:
      self.base_rev = self.options.revision
    else:
      # "hg parent -q" prints "rev:node"; keep the node part.
      self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()

  def GetGUID(self):
    """Returns the hash of revision 0, which uniquely identifies the repo."""
    # See chapter "Uniquely identifying a repository"
    # http://hgbook.red-bean.com/read/customizing-the-output-of-mercurial.html
    info = RunShell("hg log -r0 --template {node}".split())
    return info.strip()

  def _GetRelPath(self, filename):
    """Get relative path of a file according to the current directory,
    given its logical path in the repo."""
    absname = os.path.join(self.repo_dir, filename)
    return os.path.relpath(absname)

  def GenerateDiff(self, extra_args):
    """Returns "hg diff --git" output rewritten with svn-style Index: lines."""
    cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
    data = RunShell(cmd, silent_ok=True)
    svndiff = []
    filecount = 0
    for line in data.splitlines():
      m = re.match("diff --git a/(\S+) b/(\S+)", line)
      if m:
        # Modify line to make it look like as it comes from svn diff.
        # With this modification no changes on the server side are required
        # to make upload.py work with Mercurial repos.
        # NOTE: for proper handling of moved/copied files, we have to use
        # the second filename.
        filename = m.group(2)
        svndiff.append("Index: %s" % filename)
        svndiff.append("=" * 67)
        filecount += 1
        logging.info(line)
      else:
        svndiff.append(line)
    if not filecount:
      ErrorExit("No valid patches found in output from hg diff")
    return "\n".join(svndiff) + "\n"

  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    # (Removed an unused local "args = []" that was never read.)
    status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
                      silent_ok=True)
    unknown_files = []
    for line in status.splitlines():
      st, fn = line.split(" ", 1)
      if st == "?":
        unknown_files.append(fn)
    return unknown_files

  def GetBaseFile(self, filename):
    """Returns (base_content, new_content, is_binary, status) for filename."""
    # "hg status" and "hg cat" both take a path relative to the current subdir,
    # but "hg diff" has given us the path relative to the repo root.
    base_content = ""
    new_content = None
    is_binary = False
    oldrelpath = relpath = self._GetRelPath(filename)
    # "hg status -C" returns two lines for moved/copied files, one otherwise
    out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
    out = out.splitlines()
    # HACK: strip error message about missing file/directory if it isn't in
    # the working copy
    if out[0].startswith('%s: ' % relpath):
      out = out[1:]
    status, _ = out[0].split(' ', 1)
    if len(out) > 1 and status == "A":
      # Moved/copied => considered as modified, use old filename to
      # retrieve base contents
      oldrelpath = out[1].strip()
      status = "M"
    if ":" in self.base_rev:
      base_rev = self.base_rev.split(":", 1)[0]
    else:
      base_rev = self.base_rev
    if status != "A":
      base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
                              silent_ok=True)
      is_binary = self.IsBinaryData(base_content)
    if status != "R":
      # BUG FIX: close the file handle explicitly instead of leaking it
      # (open(...).read() relies on GC for cleanup).
      f = open(relpath, "rb")
      try:
        new_content = f.read()
      finally:
        f.close()
      is_binary = is_binary or self.IsBinaryData(new_content)
    if is_binary and base_content:
      # Fetch again without converting newlines
      base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
                              silent_ok=True, universal_newlines=False)
    if not is_binary:
      # Text content is reconstructed from the diff server-side.
      new_content = None
    return base_content, new_content, is_binary, status
1561
+
1562
+
1563
class PerforceVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Perforce."""

  def __init__(self, options):

    def ConfirmLogin():
      # Make sure we have a valid perforce session
      while True:
        data, retcode = self.RunPerforceCommandWithReturnCode(
            ["login", "-s"], marshal_output=True)
        if not data:
          ErrorExit("Error checking perforce login")
        if not retcode and (not "code" in data or data["code"] != "error"):
          break
        print "Enter perforce password: "
        self.RunPerforceCommandWithReturnCode(["login"])

    super(PerforceVCS, self).__init__(options)

    self.p4_changelist = options.p4_changelist
    if not self.p4_changelist:
      ErrorExit("A changelist id is required")
    if (options.revision):
      ErrorExit("--rev is not supported for perforce")

    self.p4_port = options.p4_port
    self.p4_client = options.p4_client
    self.p4_user = options.p4_user

    ConfirmLogin()

    # Default the review title to the first line of the CL description.
    if not options.title:
      description = self.RunPerforceCommand(["describe", self.p4_changelist],
                                            marshal_output=True)
      if description and "desc" in description:
        # Rietveld doesn't support multi-line descriptions
        raw_title = description["desc"].strip()
        lines = raw_title.splitlines()
        if len(lines):
          options.title = lines[0]

  def GetGUID(self):
    """For now we don't know how to get repository ID for Perforce"""
    return

  def RunPerforceCommandWithReturnCode(self, extra_args, marshal_output=False,
                                       universal_newlines=True):
    """Runs "p4 <extra_args>" and returns (data, retcode).

    When marshal_output is True, data is the unmarshalled Python object that
    "p4 -G" emits instead of raw text.
    """
    args = ["p4"]
    if marshal_output:
      # -G makes perforce format its output as marshalled python objects
      args.extend(["-G"])
    if self.p4_port:
      args.extend(["-p", self.p4_port])
    if self.p4_client:
      args.extend(["-c", self.p4_client])
    if self.p4_user:
      args.extend(["-u", self.p4_user])
    args.extend(extra_args)

    data, retcode = RunShellWithReturnCode(
        args, print_output=False, universal_newlines=universal_newlines)
    if marshal_output and data:
      data = marshal.loads(data)
    return data, retcode

  def RunPerforceCommand(self, extra_args, marshal_output=False,
                         universal_newlines=True):
    """Like RunPerforceCommandWithReturnCode but exits on any p4 error."""
    # This might be a good place to cache call results, since things like
    # describe or fstat might get called repeatedly.
    data, retcode = self.RunPerforceCommandWithReturnCode(
        extra_args, marshal_output, universal_newlines)
    if retcode:
      ErrorExit("Got error status from %s:\n%s" % (extra_args, data))
    return data

  def GetFileProperties(self, property_key_prefix = "", command = "describe"):
    """Maps depot filename -> property value for files in the changelist.

    NOTE(review): the 'command' parameter is accepted but never used — this
    method always runs "describe", even when callers (IsBinaryHelper) pass
    "files". Confirm whether that is intentional before relying on it.
    """
    description = self.RunPerforceCommand(["describe", self.p4_changelist],
                                          marshal_output=True)

    changed_files = {}
    file_index = 0
    # Try depotFile0, depotFile1, ... until we don't find a match
    while True:
      file_key = "depotFile%d" % file_index
      if file_key in description:
        filename = description[file_key]
        change_type = description[property_key_prefix + str(file_index)]
        changed_files[filename] = change_type
        file_index += 1
      else:
        break
    return changed_files

  def GetChangedFiles(self):
    """Maps depot filename -> p4 action (add/edit/delete/...)."""
    return self.GetFileProperties("action")

  def GetUnknownFiles(self):
    # Perforce doesn't detect new files, they have to be explicitly added
    return []

  def IsBaseBinary(self, filename):
    """Returns True if the base revision of filename has a binary p4 type."""
    base_filename = self.GetBaseFilename(filename)
    return self.IsBinaryHelper(base_filename, "files")

  def IsPendingBinary(self, filename):
    """Returns True if the pending revision of filename has a binary type."""
    return self.IsBinaryHelper(filename, "describe")

  def IsBinaryHelper(self, filename, command):
    file_types = self.GetFileProperties("type", command)
    if not filename in file_types:
      ErrorExit("Trying to check binary status of unknown file %s." % filename)
    # This treats symlinks, macintosh resource files, temporary objects, and
    # unicode as binary. See the Perforce docs for more details:
    # http://www.perforce.com/perforce/doc.current/manuals/cmdref/o.ftypes.html
    return not file_types[filename].endswith("text")

  def GetFileContent(self, filename, revision, is_binary):
    """Returns the depot content of filename, optionally at #revision."""
    file_arg = filename
    if revision:
      file_arg += "#" + revision
    # -q suppresses the initial line that displays the filename and revision
    return self.RunPerforceCommand(["print", "-q", file_arg],
                                   universal_newlines=not is_binary)

  def GetBaseFilename(self, filename):
    """Returns the depot path the file was moved/branched from, if any."""
    actionsWithDifferentBases = [
        "move/add",  # p4 move
        "branch",  # p4 integrate (to a new file), similar to hg "add"
        "add",  # p4 integrate (to a new file), after modifying the new file
    ]

    # We only see a different base for "add" if this is a downgraded branch
    # after a file was branched (integrated), then edited.
    if self.GetAction(filename) in actionsWithDifferentBases:
      # -Or shows information about pending integrations/moves
      fstat_result = self.RunPerforceCommand(["fstat", "-Or", filename],
                                             marshal_output=True)

      baseFileKey = "resolveFromFile0"  # I think it's safe to use only file0
      if baseFileKey in fstat_result:
        return fstat_result[baseFileKey]

    return filename

  def GetBaseRevision(self, filename):
    """Returns the client's synced revision of the base file, or None."""
    base_filename = self.GetBaseFilename(filename)

    have_result = self.RunPerforceCommand(["have", base_filename],
                                          marshal_output=True)
    if "haveRev" in have_result:
      return have_result["haveRev"]

  def GetLocalFilename(self, filename):
    """Maps a depot path to its local workspace path via "p4 where"."""
    where = self.RunPerforceCommand(["where", filename], marshal_output=True)
    if "path" in where:
      return where["path"]

  def GenerateDiff(self, args):
    """Builds an svn-style unified diff for every file in the changelist."""

    # Per-file scratchpad passed between the helper closures below.
    class DiffData:
      def __init__(self, perforceVCS, filename, action):
        self.perforceVCS = perforceVCS
        self.filename = filename
        self.action = action
        self.base_filename = perforceVCS.GetBaseFilename(filename)

        self.file_body = None
        self.base_rev = None
        self.prefix = None
        self.working_copy = True
        self.change_summary = None

    def GenerateDiffHeader(diffData):
      # Emits the svn-style "Index:" header, rename/copy lines, and the
      # ---/+++ file markers.
      header = []
      header.append("Index: %s" % diffData.filename)
      header.append("=" * 67)

      if diffData.base_filename != diffData.filename:
        if diffData.action.startswith("move"):
          verb = "rename"
        else:
          verb = "copy"
        header.append("%s from %s" % (verb, diffData.base_filename))
        header.append("%s to %s" % (verb, diffData.filename))

      suffix = "\t(revision %s)" % diffData.base_rev
      header.append("--- " + diffData.base_filename + suffix)
      if diffData.working_copy:
        suffix = "\t(working copy)"
      header.append("+++ " + diffData.filename + suffix)
      if diffData.change_summary:
        header.append(diffData.change_summary)
      return header

    def GenerateMergeDiff(diffData, args):
      # -du generates a unified diff, which is nearly svn format
      diffData.file_body = self.RunPerforceCommand(
          ["diff", "-du", diffData.filename] + args)
      diffData.base_rev = self.GetBaseRevision(diffData.filename)
      diffData.prefix = ""

      # We have to replace p4's file status output (the lines starting
      # with +++ or ---) to match svn's diff format
      lines = diffData.file_body.splitlines()
      first_good_line = 0
      while (first_good_line < len(lines) and
          not lines[first_good_line].startswith("@@")):
        first_good_line += 1
      diffData.file_body = "\n".join(lines[first_good_line:])
      return diffData

    def GenerateAddDiff(diffData):
      fstat = self.RunPerforceCommand(["fstat", diffData.filename],
                                      marshal_output=True)
      if "headRev" in fstat:
        diffData.base_rev = fstat["headRev"]  # Re-adding a deleted file
      else:
        diffData.base_rev = "0"  # Brand new file
      diffData.working_copy = False
      rel_path = self.GetLocalFilename(diffData.filename)
      diffData.file_body = open(rel_path, 'r').read()
      # Replicate svn's list of changed lines
      line_count = len(diffData.file_body.splitlines())
      diffData.change_summary = "@@ -0,0 +1"
      if line_count > 1:
        diffData.change_summary += ",%d" % line_count
      diffData.change_summary += " @@"
      diffData.prefix = "+"
      return diffData

    def GenerateDeleteDiff(diffData):
      diffData.base_rev = self.GetBaseRevision(diffData.filename)
      is_base_binary = self.IsBaseBinary(diffData.filename)
      # For deletes, base_filename == filename
      diffData.file_body = self.GetFileContent(diffData.base_filename,
          None,
          is_base_binary)
      # Replicate svn's list of changed lines
      line_count = len(diffData.file_body.splitlines())
      diffData.change_summary = "@@ -1"
      if line_count > 1:
        diffData.change_summary += ",%d" % line_count
      diffData.change_summary += " +0,0 @@"
      diffData.prefix = "-"
      return diffData

    changed_files = self.GetChangedFiles()

    svndiff = []
    filecount = 0
    for (filename, action) in changed_files.items():
      svn_status = self.PerforceActionToSvnStatus(action)
      if svn_status == "SKIP":
        continue

      diffData = DiffData(self, filename, action)
      # Is it possible to diff a branched file? Stackoverflow says no:
      # http://stackoverflow.com/questions/1771314/in-perforce-command-line-how-to-diff-a-file-reopened-for-add
      if svn_status == "M":
        diffData = GenerateMergeDiff(diffData, args)
      elif svn_status == "A":
        diffData = GenerateAddDiff(diffData)
      elif svn_status == "D":
        diffData = GenerateDeleteDiff(diffData)
      else:
        ErrorExit("Unknown file action %s (svn action %s)." % \
                  (action, svn_status))

      svndiff += GenerateDiffHeader(diffData)

      for line in diffData.file_body.splitlines():
        svndiff.append(diffData.prefix + line)
      filecount += 1
    if not filecount:
      ErrorExit("No valid patches found in output from p4 diff")
    return "\n".join(svndiff) + "\n"

  def PerforceActionToSvnStatus(self, status):
    """Maps a p4 action string to an svn-style status letter (or "SKIP")."""
    # Mirroring the list at http://permalink.gmane.org/gmane.comp.version-control.mercurial.devel/28717
    # Is there something more official?
    return {
        "add" : "A",
        "branch" : "A",
        "delete" : "D",
        "edit" : "M",  # Also includes changing file types.
        "integrate" : "M",
        "move/add" : "M",
        "move/delete": "SKIP",
        "purge" : "D",  # How does a file's status become "purge"?
    }[status]

  def GetAction(self, filename):
    """Returns the p4 action for filename, exiting if it's not in the CL."""
    changed_files = self.GetChangedFiles()
    if not filename in changed_files:
      ErrorExit("Trying to get base version of unknown file %s." % filename)

    return changed_files[filename]

  def GetBaseFile(self, filename):
    """Returns (base_content, new_content, is_binary, status) for filename."""
    base_filename = self.GetBaseFilename(filename)
    base_content = ""
    new_content = None

    status = self.PerforceActionToSvnStatus(self.GetAction(filename))

    if status != "A":
      revision = self.GetBaseRevision(base_filename)
      if not revision:
        ErrorExit("Couldn't find base revision for file %s" % filename)
      is_base_binary = self.IsBaseBinary(base_filename)
      base_content = self.GetFileContent(base_filename,
                                         revision,
                                         is_base_binary)

    is_binary = self.IsPendingBinary(filename)
    if status != "D" and status != "SKIP":
      relpath = self.GetLocalFilename(filename)
      if is_binary:
        new_content = open(relpath, "rb").read()

    return base_content, new_content, is_binary, status
1883
+
1884
+ # NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
1885
def SplitPatch(data):
  """Splits a patch into separate pieces for each file.

  Args:
    data: A string containing the output of svn diff.

  Returns:
    A list of 2-tuple (filename, text) where text is the svn diff output
    pertaining to filename.
  """
  patches = []
  current_name = None
  current_lines = []
  for line in data.splitlines(True):
    started_name = None
    if line.startswith('Index:'):
      started_name = line.split(':', 1)[1].strip()
    elif line.startswith('Property changes on:'):
      # Modification paths use '/' between directories, but on Windows a
      # property-change path uses '\'. Normalize so the same file doesn't
      # show up twice.
      candidate = line.split(':', 1)[1].strip().replace('\\', '/')
      if candidate != current_name:
        # Property changes without content modifications start a new entry.
        started_name = candidate
    if started_name:
      # Flush the previous file's accumulated diff before starting anew.
      if current_name and current_lines:
        patches.append((current_name, ''.join(current_lines)))
      current_name = started_name
      current_lines = [line]
    else:
      current_lines.append(line)
  if current_name and current_lines:
    patches.append((current_name, ''.join(current_lines)))
  return patches
1923
+
1924
+
1925
+ def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
1926
+ """Uploads a separate patch for each file in the diff output.
1927
+
1928
+ Returns a list of [patch_key, filename] for each file.
1929
+ """
1930
+ patches = SplitPatch(data)
1931
+ rv = []
1932
+ for patch in patches:
1933
+ if len(patch[1]) > MAX_UPLOAD_SIZE:
1934
+ print ("Not uploading the patch for " + patch[0] +
1935
+ " because the file is too large.")
1936
+ continue
1937
+ form_fields = [("filename", patch[0])]
1938
+ if not options.download_base:
1939
+ form_fields.append(("content_upload", "1"))
1940
+ files = [("data", "data.diff", patch[1])]
1941
+ ctype, body = EncodeMultipartFormData(form_fields, files)
1942
+ url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
1943
+ print "Uploading patch for " + patch[0]
1944
+ response_body = rpc_server.Send(url, body, content_type=ctype)
1945
+ lines = response_body.splitlines()
1946
+ if not lines or lines[0] != "OK":
1947
+ StatusUpdate(" --> %s" % response_body)
1948
+ sys.exit(1)
1949
+ rv.append([lines[1], patch[0]])
1950
+ return rv
1951
+
1952
+
1953
def GuessVCSName(options):
  """Helper to guess the version control system.

  This examines the current directory, guesses which VersionControlSystem
  we're using, and returns an string indicating which VCS is detected.

  Returns:
    A pair (vcs, output). vcs is a string indicating which VCS was detected
    and is one of VCS_GIT, VCS_MERCURIAL, VCS_SUBVERSION, VCS_PERFORCE,
    VCS_CVS, or VCS_UNKNOWN.
    Since local perforce repositories can't be easily detected, this method
    will only guess VCS_PERFORCE if any perforce options have been specified.
    output is a string containing any interesting output from the vcs
    detection routine, or None if there is nothing interesting.
  """
  # Any explicitly supplied Perforce option short-circuits detection, since a
  # Perforce client workspace leaves no on-disk marker we could probe for.
  for attribute, value in options.__dict__.iteritems():
    if attribute.startswith("p4") and value != None:
      return (VCS_PERFORCE, None)

  def RunDetectCommand(vcs_type, command):
    """Helper to detect VCS by executing command.

    Returns:
      A pair (vcs, output) or None. Throws exception on error.
    """
    try:
      out, returncode = RunShellWithReturnCode(command)
      if returncode == 0:
        # Exit status 0 means the tool recognized the current directory.
        return (vcs_type, out.strip())
    except OSError, (errcode, message):
      if errcode != errno.ENOENT:  # command not found code
        raise
    # Falls through to an implicit None when the tool is missing or the
    # command reported failure; callers treat None as "not this VCS".

  # Mercurial has a command to get the base directory of a repository
  # Try running it, but don't die if we don't have hg installed.
  # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
  res = RunDetectCommand(VCS_MERCURIAL, ["hg", "root"])
  if res != None:
    return res

  # Subversion from 1.7 has a single centralized .svn folder
  # ( see http://subversion.apache.org/docs/release-notes/1.7.html#wc-ng )
  # That's why we use 'svn info' instead of checking for .svn dir
  res = RunDetectCommand(VCS_SUBVERSION, ["svn", "info"])
  if res != None:
    return res

  # Git has a command to test if you're in a git tree.
  # Try running it, but don't die if we don't have git installed.
  res = RunDetectCommand(VCS_GIT, ["git", "rev-parse",
                                   "--is-inside-work-tree"])
  if res != None:
    return res

  # detect CVS repos use `cvs status && $? == 0` rules
  res = RunDetectCommand(VCS_CVS, ["cvs", "status"])
  if res != None:
    return res

  # No probe succeeded: report an unknown VCS with no extra output.
  return (VCS_UNKNOWN, None)
2013
+
2014
+
2015
def GuessVCS(options):
  """Helper to guess the version control system.

  This verifies any user-specified VersionControlSystem (by command line
  or environment variable). If the user didn't specify one, this examines
  the current directory, guesses which VersionControlSystem we're using,
  and returns an instance of the appropriate class. Exit with an error
  if we can't figure it out.

  Returns:
    A VersionControlSystem instance. Exits if the VCS can't be guessed.
  """
  # Explicit --vcs wins, then the CODEREVIEW_VCS environment variable.
  requested = options.vcs or os.environ.get("CODEREVIEW_VCS")
  if requested:
    resolved = VCS_ABBREVIATIONS.get(requested.lower())
    if resolved is None:
      ErrorExit("Unknown version control system %r specified." % requested)
    vcs, extra_output = resolved, None
  else:
    vcs, extra_output = GuessVCSName(options)

  if vcs == VCS_MERCURIAL:
    # Mercurial needs the repository root; reuse detection output if we
    # have it, otherwise ask hg directly.
    root = extra_output if extra_output is not None else (
        RunShell(["hg", "root"]).strip())
    return MercurialVCS(options, root)

  # The remaining backends are constructed from options alone.
  constructors = {
      VCS_SUBVERSION: SubversionVCS,
      VCS_PERFORCE: PerforceVCS,
      VCS_GIT: GitVCS,
      VCS_CVS: CVSVCS,
  }
  if vcs in constructors:
    return constructors[vcs](options)

  ErrorExit(("Could not guess version control system. "
             "Are you in a working copy directory?"))
2053
+
2054
+
2055
def CheckReviewer(reviewer):
  """Validate a reviewer -- either a nickname or an email address.

  Args:
    reviewer: A nickname or an email address.

  Calls ErrorExit() if it is an invalid email address.
  """
  if "@" not in reviewer:
    # No "@" at all: treat the value as a nickname, nothing to validate.
    return
  if reviewer.count("@") != 1:
    # More than one "@" cannot be a well-formed address.
    ErrorExit("Invalid email address: %r" % reviewer)
  domain = reviewer.split("@")[1]
  if "." not in domain:
    # Require at least one dot in the domain part.
    ErrorExit("Invalid email address: %r" % reviewer)
2071
+
2072
+
2073
def LoadSubversionAutoProperties():
  """Returns the content of [auto-props] section of Subversion's config file as
  a dictionary.

  Returns:
    A dictionary whose key-value pair corresponds the [auto-props] section's
    key-value pair.
    In following cases, returns empty dictionary:
     - config file doesn't exist, or
     - 'enable-auto-props' is not set to 'true-like-value' in [miscellany].
  """
  # Subversion stores its per-user config in a platform-dependent location.
  if os.name == 'nt':
    config_path = os.environ.get("APPDATA") + "\\Subversion\\config"
  else:
    config_path = os.path.expanduser("~/.subversion/config")
  if not os.path.exists(config_path):
    return {}
  config = ConfigParser.ConfigParser()
  config.read(config_path)
  # Auto-props only apply when explicitly enabled under [miscellany].
  enabled = (config.has_section("miscellany") and
             config.has_option("miscellany", "enable-auto-props") and
             config.getboolean("miscellany", "enable-auto-props") and
             config.has_section("auto-props"))
  if not enabled:
    return {}
  return dict(
      (pattern, ParseSubversionPropertyValues(
          config.get("auto-props", pattern)))
      for pattern in config.options("auto-props"))
2103
+
2104
def ParseSubversionPropertyValues(props):
  """Parse the given property value which comes from [auto-props] section and
  returns a list whose element is a (svn_prop_key, svn_prop_value) pair.

  See the following doctest for example.

  >>> ParseSubversionPropertyValues('svn:eol-style=LF')
  [('svn:eol-style', 'LF')]
  >>> ParseSubversionPropertyValues('svn:mime-type=image/jpeg')
  [('svn:mime-type', 'image/jpeg')]
  >>> ParseSubversionPropertyValues('svn:eol-style=LF;svn:executable')
  [('svn:eol-style', 'LF'), ('svn:executable', '*')]
  """
  key_value_pairs = []
  for prop in props.split(";"):
    # Split on the first '=' only, so property values that themselves
    # contain '=' (e.g. 'svn:keywords=Id=x') survive intact.  The previous
    # split('=') + assert crashed on such values (and the assert would be
    # silently stripped under python -O).
    key, sep, value = prop.partition("=")
    if not sep:
      # If value is not given, use '*' as a Subversion's convention.
      value = "*"
    key_value_pairs.append((key, value))
  return key_value_pairs
2127
+
2128
+
2129
def GetSubversionPropertyChanges(filename):
  """Return a Subversion's 'Property changes on ...' string, which is used in
  the patch file.

  Args:
    filename: filename whose property might be set by [auto-props] config.

  Returns:
    A string like 'Property changes on |filename| ...' if given |filename|
    matches any entries in [auto-props] section. None, otherwise.
  """
  global svn_auto_props_map
  if svn_auto_props_map is None:
    # Lazily load and cache the [auto-props] configuration on first use.
    svn_auto_props_map = LoadSubversionAutoProperties()

  matched_props = []
  for pattern, prop_list in svn_auto_props_map.items():
    if fnmatch.fnmatch(filename, pattern):
      matched_props.extend(prop_list)
  if not matched_props:
    return None
  return FormatSubversionPropertyChanges(filename, matched_props)
2151
+
2152
+
2153
def FormatSubversionPropertyChanges(filename, props):
  """Returns Subversion's 'Property changes on ...' strings using given filename
  and properties.

  Args:
    filename: filename
    props: A list whose element is a (svn_prop_key, svn_prop_value) pair.

  Returns:
    A string which can be used in the patch file for Subversion, in the form:
    a 'Property changes on: <filename>' header, an underscore rule, then an
    'Added:' line and an indented value line per property, ending with '\\n'.
  """
  output_lines = [
      "Property changes on: %s" % filename,
      "___________________________________________________________________"]
  for prop_key, prop_value in props:
    output_lines.extend(["Added: " + prop_key, " + " + prop_value])
  return "\n".join(output_lines) + "\n"
2180
+
2181
+
2182
def RealMain(argv, data=None):
  """The real main function.

  Args:
    argv: Command line arguments.
    data: Diff contents. If None (default) the diff is generated by
      the VersionControlSystem implementation returned by GuessVCS().

  Returns:
    A 2-tuple (issue id, patchset id).
    The patchset id is None if the base files are not uploaded by this
    script (applies only to SVN checkouts).
  """
  options, args = parser.parse_args(argv[1:])
  if options.help:
    if options.verbose < 2:
      # hide Perforce options
      parser.epilog = "Use '--help -v' to show additional Perforce options."
      parser.option_groups.remove(parser.get_option_group('--p4_port'))
    parser.print_help()
    sys.exit(0)

  # Verbosity is a module-level global consulted by other helpers.
  global verbosity
  verbosity = options.verbose
  if verbosity >= 3:
    logging.getLogger().setLevel(logging.DEBUG)
  elif verbosity >= 2:
    logging.getLogger().setLevel(logging.INFO)

  vcs = GuessVCS(options)

  base = options.base_url
  if isinstance(vcs, SubversionVCS):
    # Guessing the base field is only supported for Subversion.
    # Note: Fetching base files may become deprecated in future releases.
    guessed_base = vcs.GuessBase(options.download_base)
    if base:
      if guessed_base and base != guessed_base:
        print "Using base URL \"%s\" from --base_url instead of \"%s\"" % \
            (base, guessed_base)
    else:
      base = guessed_base

  # NOTE(review): this branch only runs when options.download_base is already
  # truthy, so the assignment is a no-op; it may have been intended as
  # "and not options.download_base" -- confirm against upstream upload.py.
  if not base and options.download_base:
    options.download_base = True
    logging.info("Enabled upload of base file")
  if not options.assume_yes:
    vcs.CheckForUnknownFiles()
  if data is None:
    data = vcs.GenerateDiff(args)
  data = vcs.PostProcessDiff(data)
  if options.print_diffs:
    print "Rietveld diff start:*****"
    print data
    print "Rietveld diff end:*****"
  files = vcs.GetBaseFiles(data)
  if verbosity >= 1:
    print "Upload server:", options.server, "(change with -s/--server)"
  rpc_server = GetRpcServer(options.server,
                            options.email,
                            options.host,
                            options.save_cookies,
                            options.account_type)
  form_fields = []

  repo_guid = vcs.GetGUID()
  if repo_guid:
    form_fields.append(("repo_guid", repo_guid))
  if base:
    # Strip any embedded username from the base URL before sending it.
    b = urlparse.urlparse(base)
    username, netloc = urllib.splituser(b.netloc)
    if username:
      logging.info("Removed username from base URL")
    base = urlparse.urlunparse((b.scheme, netloc, b.path, b.params,
                                b.query, b.fragment))
    form_fields.append(("base", base))
  if options.issue:
    form_fields.append(("issue", str(options.issue)))
  if options.email:
    form_fields.append(("user", options.email))
  if options.reviewers:
    # CheckReviewer exits on an invalid address before anything is uploaded.
    for reviewer in options.reviewers.split(','):
      CheckReviewer(reviewer)
    form_fields.append(("reviewers", options.reviewers))
  if options.cc:
    for cc in options.cc.split(','):
      CheckReviewer(cc)
    form_fields.append(("cc", options.cc))

  # Process --message, --title and --file.
  message = options.message or ""
  title = options.title or ""
  if options.file:
    if options.message:
      ErrorExit("Can't specify both message and message file options")
    # NOTE(review): 'file' shadows the builtin; left as-is to avoid a
    # behavior-affecting rename in this review.
    file = open(options.file, 'r')
    message = file.read()
    file.close()
  if options.issue:
    prompt = "Title describing this patch set: "
  else:
    prompt = "New issue subject: "
  # Fall back from explicit title, to the message's first line, to prompting.
  title = (
      title or message.split('\n', 1)[0].strip() or raw_input(prompt).strip())
  if not title and not options.issue:
    ErrorExit("A non-empty title is required for a new issue")
  # For existing issues, it's fine to give a patchset an empty name. Rietveld
  # doesn't accept that so use a whitespace.
  title = title or " "
  if len(title) > 100:
    # Truncate to 99 chars plus an ellipsis to fit the 100-char limit.
    title = title[:99] + '…'
  if title and not options.issue:
    message = message or title

  form_fields.append(("subject", title))
  # If it's a new issue send message as description. Otherwise a new
  # message is created below on upload_complete.
  if message and not options.issue:
    form_fields.append(("description", message))

  # Send a hash of all the base file so the server can determine if a copy
  # already exists in an earlier patchset.
  base_hashes = ""
  for file, info in files.iteritems():
    if not info[0] is None:
      checksum = md5(info[0]).hexdigest()
      if base_hashes:
        base_hashes += "|"
      base_hashes += checksum + ":" + file
  form_fields.append(("base_hashes", base_hashes))
  if options.private:
    if options.issue:
      print "Warning: Private flag ignored when updating an existing issue."
    else:
      form_fields.append(("private", "1"))
  if options.send_patch:
    # --send_patch implies --send_mail.
    options.send_mail = True
  if not options.download_base:
    form_fields.append(("content_upload", "1"))
  if len(data) > MAX_UPLOAD_SIZE:
    # Too big for a single request: upload per-file patches afterwards.
    print "Patch is large, so uploading file patches separately."
    uploaded_diff_file = []
    form_fields.append(("separate_patches", "1"))
  else:
    uploaded_diff_file = [("data", "data.diff", data)]
  ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
  response_body = rpc_server.Send("/upload", body, content_type=ctype)
  patchset = None
  if not options.download_base or not uploaded_diff_file:
    # Server response: message line, patchset id line, then patch lines of
    # the form "<patch_key> <filename>".
    lines = response_body.splitlines()
    if len(lines) >= 2:
      msg = lines[0]
      patchset = lines[1].strip()
      patches = [x.split(" ", 1) for x in lines[2:]]
    else:
      msg = response_body
  else:
    msg = response_body
  StatusUpdate(msg)
  if not response_body.startswith("Issue created.") and \
     not response_body.startswith("Issue updated."):
    sys.exit(0)
  # The issue id is the last path component of the URL in the message.
  issue = msg[msg.rfind("/")+1:]

  if not uploaded_diff_file:
    result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
    if not options.download_base:
      patches = result

  if not options.download_base:
    vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)

  payload = {}  # payload for final request
  if options.send_mail:
    payload["send_mail"] = "yes"
  if options.send_patch:
    payload["attach_patch"] = "yes"
  if options.issue and message:
    payload["message"] = message
  payload = urllib.urlencode(payload)
  rpc_server.Send("/" + issue + "/upload_complete/" + (patchset or ""),
                  payload=payload)
  return issue, patchset
2365
+
2366
+
2367
def main():
  """Script entry point: configure logging, force the C locale, run RealMain.

  Exits with status 1 on Ctrl-C after printing a status message.
  """
  try:
    log_format = ("%(asctime).19s %(levelname)s %(filename)s:"
                  "%(lineno)s %(message)s ")
    logging.basicConfig(format=log_format)
    # Force the C locale so VCS tool output is parsed consistently.
    os.environ['LC_ALL'] = 'C'
    RealMain(sys.argv)
  except KeyboardInterrupt:
    print
    StatusUpdate("Interrupted.")
    sys.exit(1)
2377
+
2378
+
2379
# Standard guard: run main() only when executed as a script, not on import.
if __name__ == "__main__":
  main()