rigid 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (364) hide show
  1. checksums.yaml +7 -0
  2. data/bin/rigid +4 -0
  3. data/rigid +16 -0
  4. data/vendor/PyYAML-3.12.dist-info/DESCRIPTION.rst +12 -0
  5. data/vendor/PyYAML-3.12.dist-info/INSTALLER +1 -0
  6. data/vendor/PyYAML-3.12.dist-info/METADATA +35 -0
  7. data/vendor/PyYAML-3.12.dist-info/RECORD +42 -0
  8. data/vendor/PyYAML-3.12.dist-info/WHEEL +5 -0
  9. data/vendor/PyYAML-3.12.dist-info/metadata.json +1 -0
  10. data/vendor/PyYAML-3.12.dist-info/top_level.txt +2 -0
  11. data/vendor/_yaml.so +0 -0
  12. data/vendor/click/__init__.py +98 -0
  13. data/vendor/click/__init__.pyc +0 -0
  14. data/vendor/click/_bashcomplete.py +83 -0
  15. data/vendor/click/_bashcomplete.pyc +0 -0
  16. data/vendor/click/_compat.py +642 -0
  17. data/vendor/click/_compat.pyc +0 -0
  18. data/vendor/click/_termui_impl.py +547 -0
  19. data/vendor/click/_termui_impl.pyc +0 -0
  20. data/vendor/click/_textwrap.py +38 -0
  21. data/vendor/click/_textwrap.pyc +0 -0
  22. data/vendor/click/_unicodefun.py +119 -0
  23. data/vendor/click/_unicodefun.pyc +0 -0
  24. data/vendor/click/_winconsole.py +273 -0
  25. data/vendor/click/_winconsole.pyc +0 -0
  26. data/vendor/click/core.py +1738 -0
  27. data/vendor/click/core.pyc +0 -0
  28. data/vendor/click/decorators.py +304 -0
  29. data/vendor/click/decorators.pyc +0 -0
  30. data/vendor/click/exceptions.py +201 -0
  31. data/vendor/click/exceptions.pyc +0 -0
  32. data/vendor/click/formatting.py +256 -0
  33. data/vendor/click/formatting.pyc +0 -0
  34. data/vendor/click/globals.py +48 -0
  35. data/vendor/click/globals.pyc +0 -0
  36. data/vendor/click/parser.py +426 -0
  37. data/vendor/click/parser.pyc +0 -0
  38. data/vendor/click/termui.py +539 -0
  39. data/vendor/click/termui.pyc +0 -0
  40. data/vendor/click/testing.py +322 -0
  41. data/vendor/click/testing.pyc +0 -0
  42. data/vendor/click/types.py +550 -0
  43. data/vendor/click/types.pyc +0 -0
  44. data/vendor/click/utils.py +415 -0
  45. data/vendor/click/utils.pyc +0 -0
  46. data/vendor/click-6.6.dist-info/DESCRIPTION.rst +3 -0
  47. data/vendor/click-6.6.dist-info/INSTALLER +1 -0
  48. data/vendor/click-6.6.dist-info/METADATA +16 -0
  49. data/vendor/click-6.6.dist-info/RECORD +41 -0
  50. data/vendor/click-6.6.dist-info/WHEEL +6 -0
  51. data/vendor/click-6.6.dist-info/metadata.json +1 -0
  52. data/vendor/click-6.6.dist-info/top_level.txt +1 -0
  53. data/vendor/easy_install.py +5 -0
  54. data/vendor/easy_install.pyc +0 -0
  55. data/vendor/pip-9.0.1.dist-info/DESCRIPTION.rst +39 -0
  56. data/vendor/pip-9.0.1.dist-info/INSTALLER +1 -0
  57. data/vendor/pip-9.0.1.dist-info/METADATA +69 -0
  58. data/vendor/pip-9.0.1.dist-info/RECORD +501 -0
  59. data/vendor/pip-9.0.1.dist-info/WHEEL +6 -0
  60. data/vendor/pip-9.0.1.dist-info/entry_points.txt +5 -0
  61. data/vendor/pip-9.0.1.dist-info/metadata.json +1 -0
  62. data/vendor/pip-9.0.1.dist-info/top_level.txt +1 -0
  63. data/vendor/pkg_resources/__init__.py +3051 -0
  64. data/vendor/pkg_resources/__init__.pyc +0 -0
  65. data/vendor/pkg_resources/_vendor/__init__.py +0 -0
  66. data/vendor/pkg_resources/_vendor/__init__.pyc +0 -0
  67. data/vendor/pkg_resources/_vendor/appdirs.py +552 -0
  68. data/vendor/pkg_resources/_vendor/appdirs.pyc +0 -0
  69. data/vendor/pkg_resources/_vendor/packaging/__about__.py +21 -0
  70. data/vendor/pkg_resources/_vendor/packaging/__about__.pyc +0 -0
  71. data/vendor/pkg_resources/_vendor/packaging/__init__.py +14 -0
  72. data/vendor/pkg_resources/_vendor/packaging/__init__.pyc +0 -0
  73. data/vendor/pkg_resources/_vendor/packaging/_compat.py +30 -0
  74. data/vendor/pkg_resources/_vendor/packaging/_compat.pyc +0 -0
  75. data/vendor/pkg_resources/_vendor/packaging/_structures.py +68 -0
  76. data/vendor/pkg_resources/_vendor/packaging/_structures.pyc +0 -0
  77. data/vendor/pkg_resources/_vendor/packaging/markers.py +287 -0
  78. data/vendor/pkg_resources/_vendor/packaging/markers.pyc +0 -0
  79. data/vendor/pkg_resources/_vendor/packaging/requirements.py +127 -0
  80. data/vendor/pkg_resources/_vendor/packaging/requirements.pyc +0 -0
  81. data/vendor/pkg_resources/_vendor/packaging/specifiers.py +774 -0
  82. data/vendor/pkg_resources/_vendor/packaging/specifiers.pyc +0 -0
  83. data/vendor/pkg_resources/_vendor/packaging/utils.py +14 -0
  84. data/vendor/pkg_resources/_vendor/packaging/utils.pyc +0 -0
  85. data/vendor/pkg_resources/_vendor/packaging/version.py +393 -0
  86. data/vendor/pkg_resources/_vendor/packaging/version.pyc +0 -0
  87. data/vendor/pkg_resources/_vendor/pyparsing.py +5696 -0
  88. data/vendor/pkg_resources/_vendor/pyparsing.pyc +0 -0
  89. data/vendor/pkg_resources/_vendor/six.py +868 -0
  90. data/vendor/pkg_resources/_vendor/six.pyc +0 -0
  91. data/vendor/pkg_resources/extern/__init__.py +73 -0
  92. data/vendor/pkg_resources/extern/__init__.pyc +0 -0
  93. data/vendor/requests/__init__.py +86 -0
  94. data/vendor/requests/__init__.pyc +0 -0
  95. data/vendor/requests/adapters.py +503 -0
  96. data/vendor/requests/adapters.pyc +0 -0
  97. data/vendor/requests/api.py +148 -0
  98. data/vendor/requests/api.pyc +0 -0
  99. data/vendor/requests/auth.py +252 -0
  100. data/vendor/requests/auth.pyc +0 -0
  101. data/vendor/requests/cacert.pem +5616 -0
  102. data/vendor/requests/certs.py +25 -0
  103. data/vendor/requests/certs.pyc +0 -0
  104. data/vendor/requests/compat.py +66 -0
  105. data/vendor/requests/compat.pyc +0 -0
  106. data/vendor/requests/cookies.py +540 -0
  107. data/vendor/requests/cookies.pyc +0 -0
  108. data/vendor/requests/exceptions.py +114 -0
  109. data/vendor/requests/exceptions.pyc +0 -0
  110. data/vendor/requests/hooks.py +34 -0
  111. data/vendor/requests/hooks.pyc +0 -0
  112. data/vendor/requests/models.py +873 -0
  113. data/vendor/requests/models.pyc +0 -0
  114. data/vendor/requests/packages/__init__.py +36 -0
  115. data/vendor/requests/packages/__init__.pyc +0 -0
  116. data/vendor/requests/packages/chardet/__init__.py +32 -0
  117. data/vendor/requests/packages/chardet/__init__.pyc +0 -0
  118. data/vendor/requests/packages/chardet/big5freq.py +925 -0
  119. data/vendor/requests/packages/chardet/big5freq.pyc +0 -0
  120. data/vendor/requests/packages/chardet/big5prober.py +42 -0
  121. data/vendor/requests/packages/chardet/big5prober.pyc +0 -0
  122. data/vendor/requests/packages/chardet/chardetect.py +80 -0
  123. data/vendor/requests/packages/chardet/chardetect.pyc +0 -0
  124. data/vendor/requests/packages/chardet/chardistribution.py +231 -0
  125. data/vendor/requests/packages/chardet/chardistribution.pyc +0 -0
  126. data/vendor/requests/packages/chardet/charsetgroupprober.py +106 -0
  127. data/vendor/requests/packages/chardet/charsetgroupprober.pyc +0 -0
  128. data/vendor/requests/packages/chardet/charsetprober.py +62 -0
  129. data/vendor/requests/packages/chardet/charsetprober.pyc +0 -0
  130. data/vendor/requests/packages/chardet/codingstatemachine.py +61 -0
  131. data/vendor/requests/packages/chardet/codingstatemachine.pyc +0 -0
  132. data/vendor/requests/packages/chardet/compat.py +34 -0
  133. data/vendor/requests/packages/chardet/compat.pyc +0 -0
  134. data/vendor/requests/packages/chardet/constants.py +39 -0
  135. data/vendor/requests/packages/chardet/constants.pyc +0 -0
  136. data/vendor/requests/packages/chardet/cp949prober.py +44 -0
  137. data/vendor/requests/packages/chardet/cp949prober.pyc +0 -0
  138. data/vendor/requests/packages/chardet/escprober.py +86 -0
  139. data/vendor/requests/packages/chardet/escprober.pyc +0 -0
  140. data/vendor/requests/packages/chardet/escsm.py +242 -0
  141. data/vendor/requests/packages/chardet/escsm.pyc +0 -0
  142. data/vendor/requests/packages/chardet/eucjpprober.py +90 -0
  143. data/vendor/requests/packages/chardet/eucjpprober.pyc +0 -0
  144. data/vendor/requests/packages/chardet/euckrfreq.py +596 -0
  145. data/vendor/requests/packages/chardet/euckrfreq.pyc +0 -0
  146. data/vendor/requests/packages/chardet/euckrprober.py +42 -0
  147. data/vendor/requests/packages/chardet/euckrprober.pyc +0 -0
  148. data/vendor/requests/packages/chardet/euctwfreq.py +428 -0
  149. data/vendor/requests/packages/chardet/euctwfreq.pyc +0 -0
  150. data/vendor/requests/packages/chardet/euctwprober.py +41 -0
  151. data/vendor/requests/packages/chardet/euctwprober.pyc +0 -0
  152. data/vendor/requests/packages/chardet/gb2312freq.py +472 -0
  153. data/vendor/requests/packages/chardet/gb2312freq.pyc +0 -0
  154. data/vendor/requests/packages/chardet/gb2312prober.py +41 -0
  155. data/vendor/requests/packages/chardet/gb2312prober.pyc +0 -0
  156. data/vendor/requests/packages/chardet/hebrewprober.py +283 -0
  157. data/vendor/requests/packages/chardet/hebrewprober.pyc +0 -0
  158. data/vendor/requests/packages/chardet/jisfreq.py +569 -0
  159. data/vendor/requests/packages/chardet/jisfreq.pyc +0 -0
  160. data/vendor/requests/packages/chardet/jpcntx.py +227 -0
  161. data/vendor/requests/packages/chardet/jpcntx.pyc +0 -0
  162. data/vendor/requests/packages/chardet/langbulgarianmodel.py +229 -0
  163. data/vendor/requests/packages/chardet/langbulgarianmodel.pyc +0 -0
  164. data/vendor/requests/packages/chardet/langcyrillicmodel.py +329 -0
  165. data/vendor/requests/packages/chardet/langcyrillicmodel.pyc +0 -0
  166. data/vendor/requests/packages/chardet/langgreekmodel.py +225 -0
  167. data/vendor/requests/packages/chardet/langgreekmodel.pyc +0 -0
  168. data/vendor/requests/packages/chardet/langhebrewmodel.py +201 -0
  169. data/vendor/requests/packages/chardet/langhebrewmodel.pyc +0 -0
  170. data/vendor/requests/packages/chardet/langhungarianmodel.py +225 -0
  171. data/vendor/requests/packages/chardet/langhungarianmodel.pyc +0 -0
  172. data/vendor/requests/packages/chardet/langthaimodel.py +200 -0
  173. data/vendor/requests/packages/chardet/langthaimodel.pyc +0 -0
  174. data/vendor/requests/packages/chardet/latin1prober.py +139 -0
  175. data/vendor/requests/packages/chardet/latin1prober.pyc +0 -0
  176. data/vendor/requests/packages/chardet/mbcharsetprober.py +86 -0
  177. data/vendor/requests/packages/chardet/mbcharsetprober.pyc +0 -0
  178. data/vendor/requests/packages/chardet/mbcsgroupprober.py +54 -0
  179. data/vendor/requests/packages/chardet/mbcsgroupprober.pyc +0 -0
  180. data/vendor/requests/packages/chardet/mbcssm.py +572 -0
  181. data/vendor/requests/packages/chardet/mbcssm.pyc +0 -0
  182. data/vendor/requests/packages/chardet/sbcharsetprober.py +120 -0
  183. data/vendor/requests/packages/chardet/sbcharsetprober.pyc +0 -0
  184. data/vendor/requests/packages/chardet/sbcsgroupprober.py +69 -0
  185. data/vendor/requests/packages/chardet/sbcsgroupprober.pyc +0 -0
  186. data/vendor/requests/packages/chardet/sjisprober.py +91 -0
  187. data/vendor/requests/packages/chardet/sjisprober.pyc +0 -0
  188. data/vendor/requests/packages/chardet/universaldetector.py +170 -0
  189. data/vendor/requests/packages/chardet/universaldetector.pyc +0 -0
  190. data/vendor/requests/packages/chardet/utf8prober.py +76 -0
  191. data/vendor/requests/packages/chardet/utf8prober.pyc +0 -0
  192. data/vendor/requests/packages/urllib3/__init__.py +96 -0
  193. data/vendor/requests/packages/urllib3/__init__.pyc +0 -0
  194. data/vendor/requests/packages/urllib3/_collections.py +324 -0
  195. data/vendor/requests/packages/urllib3/_collections.pyc +0 -0
  196. data/vendor/requests/packages/urllib3/connection.py +330 -0
  197. data/vendor/requests/packages/urllib3/connection.pyc +0 -0
  198. data/vendor/requests/packages/urllib3/connectionpool.py +866 -0
  199. data/vendor/requests/packages/urllib3/connectionpool.pyc +0 -0
  200. data/vendor/requests/packages/urllib3/contrib/__init__.py +0 -0
  201. data/vendor/requests/packages/urllib3/contrib/__init__.pyc +0 -0
  202. data/vendor/requests/packages/urllib3/contrib/appengine.py +231 -0
  203. data/vendor/requests/packages/urllib3/contrib/appengine.pyc +0 -0
  204. data/vendor/requests/packages/urllib3/contrib/ntlmpool.py +115 -0
  205. data/vendor/requests/packages/urllib3/contrib/ntlmpool.pyc +0 -0
  206. data/vendor/requests/packages/urllib3/contrib/pyopenssl.py +358 -0
  207. data/vendor/requests/packages/urllib3/contrib/pyopenssl.pyc +0 -0
  208. data/vendor/requests/packages/urllib3/contrib/socks.py +172 -0
  209. data/vendor/requests/packages/urllib3/contrib/socks.pyc +0 -0
  210. data/vendor/requests/packages/urllib3/exceptions.py +209 -0
  211. data/vendor/requests/packages/urllib3/exceptions.pyc +0 -0
  212. data/vendor/requests/packages/urllib3/fields.py +178 -0
  213. data/vendor/requests/packages/urllib3/fields.pyc +0 -0
  214. data/vendor/requests/packages/urllib3/filepost.py +94 -0
  215. data/vendor/requests/packages/urllib3/filepost.pyc +0 -0
  216. data/vendor/requests/packages/urllib3/packages/__init__.py +5 -0
  217. data/vendor/requests/packages/urllib3/packages/__init__.pyc +0 -0
  218. data/vendor/requests/packages/urllib3/packages/ordered_dict.py +259 -0
  219. data/vendor/requests/packages/urllib3/packages/ordered_dict.pyc +0 -0
  220. data/vendor/requests/packages/urllib3/packages/six.py +868 -0
  221. data/vendor/requests/packages/urllib3/packages/six.pyc +0 -0
  222. data/vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py +13 -0
  223. data/vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyc +0 -0
  224. data/vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py +105 -0
  225. data/vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyc +0 -0
  226. data/vendor/requests/packages/urllib3/poolmanager.py +367 -0
  227. data/vendor/requests/packages/urllib3/poolmanager.pyc +0 -0
  228. data/vendor/requests/packages/urllib3/request.py +151 -0
  229. data/vendor/requests/packages/urllib3/request.pyc +0 -0
  230. data/vendor/requests/packages/urllib3/response.py +530 -0
  231. data/vendor/requests/packages/urllib3/response.pyc +0 -0
  232. data/vendor/requests/packages/urllib3/util/__init__.py +46 -0
  233. data/vendor/requests/packages/urllib3/util/__init__.pyc +0 -0
  234. data/vendor/requests/packages/urllib3/util/connection.py +144 -0
  235. data/vendor/requests/packages/urllib3/util/connection.pyc +0 -0
  236. data/vendor/requests/packages/urllib3/util/request.py +72 -0
  237. data/vendor/requests/packages/urllib3/util/request.pyc +0 -0
  238. data/vendor/requests/packages/urllib3/util/response.py +74 -0
  239. data/vendor/requests/packages/urllib3/util/response.pyc +0 -0
  240. data/vendor/requests/packages/urllib3/util/retry.py +300 -0
  241. data/vendor/requests/packages/urllib3/util/retry.pyc +0 -0
  242. data/vendor/requests/packages/urllib3/util/ssl_.py +320 -0
  243. data/vendor/requests/packages/urllib3/util/ssl_.pyc +0 -0
  244. data/vendor/requests/packages/urllib3/util/timeout.py +242 -0
  245. data/vendor/requests/packages/urllib3/util/timeout.pyc +0 -0
  246. data/vendor/requests/packages/urllib3/util/url.py +217 -0
  247. data/vendor/requests/packages/urllib3/util/url.pyc +0 -0
  248. data/vendor/requests/sessions.py +712 -0
  249. data/vendor/requests/sessions.pyc +0 -0
  250. data/vendor/requests/status_codes.py +91 -0
  251. data/vendor/requests/status_codes.pyc +0 -0
  252. data/vendor/requests/structures.py +105 -0
  253. data/vendor/requests/structures.pyc +0 -0
  254. data/vendor/requests/utils.py +817 -0
  255. data/vendor/requests/utils.pyc +0 -0
  256. data/vendor/requests-2.11.1.dist-info/DESCRIPTION.rst +1294 -0
  257. data/vendor/requests-2.11.1.dist-info/INSTALLER +1 -0
  258. data/vendor/requests-2.11.1.dist-info/METADATA +1323 -0
  259. data/vendor/requests-2.11.1.dist-info/RECORD +170 -0
  260. data/vendor/requests-2.11.1.dist-info/WHEEL +6 -0
  261. data/vendor/requests-2.11.1.dist-info/metadata.json +1 -0
  262. data/vendor/requests-2.11.1.dist-info/top_level.txt +1 -0
  263. data/vendor/rigid/__init__.py +1 -0
  264. data/vendor/rigid/__init__.pyc +0 -0
  265. data/vendor/rigid/api.py +129 -0
  266. data/vendor/rigid/api.pyc +0 -0
  267. data/vendor/rigid/commands/__init__.py +260 -0
  268. data/vendor/rigid/commands/__init__.pyc +0 -0
  269. data/vendor/rigid/commands/deploy.py +0 -0
  270. data/vendor/rigid/commands/deploy.pyc +0 -0
  271. data/vendor/rigid/deploy.py +70 -0
  272. data/vendor/rigid/deploy.pyc +0 -0
  273. data/vendor/rigid/file_scanner.py +63 -0
  274. data/vendor/rigid/file_scanner.pyc +0 -0
  275. data/vendor/rigid/utils.py +17 -0
  276. data/vendor/rigid/utils.pyc +0 -0
  277. data/vendor/rigid-0.2.0.dist-info/DESCRIPTION.rst +3 -0
  278. data/vendor/rigid-0.2.0.dist-info/INSTALLER +1 -0
  279. data/vendor/rigid-0.2.0.dist-info/METADATA +23 -0
  280. data/vendor/rigid-0.2.0.dist-info/RECORD +49 -0
  281. data/vendor/rigid-0.2.0.dist-info/WHEEL +5 -0
  282. data/vendor/rigid-0.2.0.dist-info/entry_points.txt +3 -0
  283. data/vendor/rigid-0.2.0.dist-info/metadata.json +1 -0
  284. data/vendor/rigid-0.2.0.dist-info/top_level.txt +2 -0
  285. data/vendor/setuptools-28.8.0.dist-info/DESCRIPTION.rst +243 -0
  286. data/vendor/setuptools-28.8.0.dist-info/INSTALLER +1 -0
  287. data/vendor/setuptools-28.8.0.dist-info/METADATA +272 -0
  288. data/vendor/setuptools-28.8.0.dist-info/RECORD +143 -0
  289. data/vendor/setuptools-28.8.0.dist-info/WHEEL +6 -0
  290. data/vendor/setuptools-28.8.0.dist-info/dependency_links.txt +2 -0
  291. data/vendor/setuptools-28.8.0.dist-info/entry_points.txt +63 -0
  292. data/vendor/setuptools-28.8.0.dist-info/metadata.json +1 -0
  293. data/vendor/setuptools-28.8.0.dist-info/top_level.txt +3 -0
  294. data/vendor/setuptools-28.8.0.dist-info/zip-safe +1 -0
  295. data/vendor/tests/__init__.py +0 -0
  296. data/vendor/tests/__init__.pyc +0 -0
  297. data/vendor/tests/integration/__init__.py +0 -0
  298. data/vendor/tests/integration/__init__.pyc +0 -0
  299. data/vendor/tests/integration/test_app.py +63 -0
  300. data/vendor/tests/integration/test_app.pyc +0 -0
  301. data/vendor/tests/integration/test_apps.py +27 -0
  302. data/vendor/tests/integration/test_apps.pyc +0 -0
  303. data/vendor/tests/integration/test_deploy.py +128 -0
  304. data/vendor/tests/integration/test_deploy.pyc +0 -0
  305. data/vendor/tests/integration/test_domains.py +35 -0
  306. data/vendor/tests/integration/test_domains.pyc +0 -0
  307. data/vendor/tests/integration/test_login.py +37 -0
  308. data/vendor/tests/integration/test_login.pyc +0 -0
  309. data/vendor/tests/integration/test_promote.py +24 -0
  310. data/vendor/tests/integration/test_promote.pyc +0 -0
  311. data/vendor/tests/integration/test_token.py +33 -0
  312. data/vendor/tests/integration/test_token.pyc +0 -0
  313. data/vendor/tests/integration/test_whoami.py +24 -0
  314. data/vendor/tests/integration/test_whoami.pyc +0 -0
  315. data/vendor/tests/test_deploy.py +33 -0
  316. data/vendor/tests/test_deploy.pyc +0 -0
  317. data/vendor/tests/test_file_scanner.py +89 -0
  318. data/vendor/tests/test_file_scanner.pyc +0 -0
  319. data/vendor/tests/utils.py +78 -0
  320. data/vendor/tests/utils.pyc +0 -0
  321. data/vendor/wheel-0.30.0a0.dist-info/DESCRIPTION.rst +325 -0
  322. data/vendor/wheel-0.30.0a0.dist-info/INSTALLER +1 -0
  323. data/vendor/wheel-0.30.0a0.dist-info/LICENSE.txt +22 -0
  324. data/vendor/wheel-0.30.0a0.dist-info/METADATA +357 -0
  325. data/vendor/wheel-0.30.0a0.dist-info/RECORD +86 -0
  326. data/vendor/wheel-0.30.0a0.dist-info/WHEEL +6 -0
  327. data/vendor/wheel-0.30.0a0.dist-info/entry_points.txt +6 -0
  328. data/vendor/wheel-0.30.0a0.dist-info/metadata.json +1 -0
  329. data/vendor/wheel-0.30.0a0.dist-info/top_level.txt +1 -0
  330. data/vendor/yaml/__init__.py +315 -0
  331. data/vendor/yaml/__init__.pyc +0 -0
  332. data/vendor/yaml/composer.py +139 -0
  333. data/vendor/yaml/composer.pyc +0 -0
  334. data/vendor/yaml/constructor.py +675 -0
  335. data/vendor/yaml/constructor.pyc +0 -0
  336. data/vendor/yaml/cyaml.py +85 -0
  337. data/vendor/yaml/cyaml.pyc +0 -0
  338. data/vendor/yaml/dumper.py +62 -0
  339. data/vendor/yaml/dumper.pyc +0 -0
  340. data/vendor/yaml/emitter.py +1140 -0
  341. data/vendor/yaml/emitter.pyc +0 -0
  342. data/vendor/yaml/error.py +75 -0
  343. data/vendor/yaml/error.pyc +0 -0
  344. data/vendor/yaml/events.py +86 -0
  345. data/vendor/yaml/events.pyc +0 -0
  346. data/vendor/yaml/loader.py +40 -0
  347. data/vendor/yaml/loader.pyc +0 -0
  348. data/vendor/yaml/nodes.py +49 -0
  349. data/vendor/yaml/nodes.pyc +0 -0
  350. data/vendor/yaml/parser.py +589 -0
  351. data/vendor/yaml/parser.pyc +0 -0
  352. data/vendor/yaml/reader.py +190 -0
  353. data/vendor/yaml/reader.pyc +0 -0
  354. data/vendor/yaml/representer.py +486 -0
  355. data/vendor/yaml/representer.pyc +0 -0
  356. data/vendor/yaml/resolver.py +227 -0
  357. data/vendor/yaml/resolver.pyc +0 -0
  358. data/vendor/yaml/scanner.py +1453 -0
  359. data/vendor/yaml/scanner.pyc +0 -0
  360. data/vendor/yaml/serializer.py +111 -0
  361. data/vendor/yaml/serializer.pyc +0 -0
  362. data/vendor/yaml/tokens.py +104 -0
  363. data/vendor/yaml/tokens.pyc +0 -0
  364. metadata +407 -0
@@ -0,0 +1,1453 @@
1
+
2
+ # Scanner produces tokens of the following types:
3
+ # STREAM-START
4
+ # STREAM-END
5
+ # DIRECTIVE(name, value)
6
+ # DOCUMENT-START
7
+ # DOCUMENT-END
8
+ # BLOCK-SEQUENCE-START
9
+ # BLOCK-MAPPING-START
10
+ # BLOCK-END
11
+ # FLOW-SEQUENCE-START
12
+ # FLOW-MAPPING-START
13
+ # FLOW-SEQUENCE-END
14
+ # FLOW-MAPPING-END
15
+ # BLOCK-ENTRY
16
+ # FLOW-ENTRY
17
+ # KEY
18
+ # VALUE
19
+ # ALIAS(value)
20
+ # ANCHOR(value)
21
+ # TAG(value)
22
+ # SCALAR(value, plain, style)
23
+ #
24
+ # Read comments in the Scanner code for more details.
25
+ #
26
+
27
# Public names exported by `from scanner import *`.
__all__ = ['Scanner', 'ScannerError']
28
+
29
+ from error import MarkedYAMLError
30
+ from tokens import *
31
+
32
class ScannerError(MarkedYAMLError):
    """Raised when the input stream cannot be tokenized as YAML."""
34
+
35
class SimpleKey(object):
    """Record of a position where a simple key may start.

    A simple key is a key that is not introduced by the '?' indicator;
    see the simple keys treatment in Scanner for how these records are
    created, validated and discarded.
    """

    def __init__(self, token_number, required, index, line, column, mark):
        # Number of the token at which the candidate key starts.
        self.token_number = token_number
        # True when a ':' must follow (block context, start of line).
        self.required = required
        # Stream position (absolute index, line, column) of the candidate.
        self.index = index
        self.line = line
        self.column = column
        # Mark object used for error reporting.
        self.mark = mark
45
+
46
+ class Scanner(object):
47
+
48
def __init__(self):
    """Initialize the scanner state.

    It is assumed that Scanner and Reader will have a common descendant.
    Reader does the dirty work of checking for BOM and converting the
    input data to Unicode.  It also adds NUL to the end.

    Reader supports the following methods:
        self.peek(i=0)     # peek the next i-th character
        self.prefix(l=1)   # peek the next l characters
        self.forward(l=1)  # read the next l characters and move the pointer
    """

    # Had we reached the end of the stream?
    self.done = False

    # The number of unclosed '{' and '['.  `flow_level == 0` means block
    # context.
    self.flow_level = 0

    # List of processed tokens that are not yet emitted.
    self.tokens = []

    # Add the STREAM-START token.  NOTE: must come after `self.tokens` is
    # created, since fetch_stream_start appends to it.
    self.fetch_stream_start()

    # Number of tokens that were emitted through the `get_token` method.
    self.tokens_taken = 0

    # The current indentation level.
    self.indent = -1

    # Past indentation levels.
    self.indents = []

    # Variables related to simple keys treatment.

    # A simple key is a key that is not denoted by the '?' indicator.
    # Example of simple keys:
    #   ---
    #   block simple key: value
    #   ? not a simple key:
    #   : { flow simple key: value }
    # We emit the KEY token before all keys, so when we find a potential
    # simple key, we try to locate the corresponding ':' indicator.
    # Simple keys should be limited to a single line and 1024 characters.

    # Can a simple key start at the current position?  A simple key may
    # start:
    # - at the beginning of the line, not counting indentation spaces
    #   (in block context),
    # - after '{', '[', ',' (in the flow context),
    # - after '?', ':', '-' (in the block context).
    # In the block context, this flag also signifies if a block collection
    # may start at the current position.
    self.allow_simple_key = True

    # Keep track of possible simple keys.  This is a dictionary.  The key
    # is `flow_level`; there can be no more than one possible simple key
    # for each level.  The value is a SimpleKey record:
    #   (token_number, required, index, line, column, mark)
    # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow),
    # '[', or '{' tokens.
    self.possible_simple_keys = {}
110
+
111
+ # Public methods.
112
+
113
def check_token(self, *choices):
    """Return True if the next token is one of *choices*.

    With no arguments, simply reports whether any token is available.
    """
    while self.need_more_tokens():
        self.fetch_more_tokens()
    if not self.tokens:
        return False
    if not choices:
        return True
    head = self.tokens[0]
    return any(isinstance(head, choice) for choice in choices)
124
+
125
def peek_token(self):
    """Return the next token without removing it from the queue.

    Returns None once the token queue is exhausted.
    """
    while self.need_more_tokens():
        self.fetch_more_tokens()
    return self.tokens[0] if self.tokens else None
131
+
132
def get_token(self):
    """Pop and return the next token (None once the stream is exhausted)."""
    while self.need_more_tokens():
        self.fetch_more_tokens()
    if not self.tokens:
        return None
    self.tokens_taken += 1
    return self.tokens.pop(0)
139
+
140
+ # Private methods.
141
+
142
def need_more_tokens(self):
    """Return True when more tokens must be fetched before the head of
    `self.tokens` can safely be handed out.

    Fix: the original fell off the end of the function on one path and
    returned None; this version always returns a bool (truthiness is
    unchanged, so all callers behave identically).
    """
    if self.done:
        return False
    if not self.tokens:
        return True
    # The current head token may still become the KEY of a pending simple
    # key, so we need to look further before emitting it.
    self.stale_possible_simple_keys()
    return self.next_possible_simple_key() == self.tokens_taken
152
+
153
def fetch_more_tokens(self):
    """Scan the input for the next token and append it to `self.tokens`.

    Dispatches on the next character to the appropriate fetch_* method.
    Raises ScannerError if no token can start at the current position.
    """

    # Eat whitespaces and comments until we reach the next token.
    self.scan_to_next_token()

    # Remove obsolete possible simple keys.
    self.stale_possible_simple_keys()

    # Compare the current indentation and column.  It may add some tokens
    # and decrease the current indentation level.
    self.unwind_indent(self.column)

    # Peek the next character.
    ch = self.peek()

    # Is it the end of stream?
    if ch == u'\0':
        return self.fetch_stream_end()

    # Is it a directive?
    if ch == u'%' and self.check_directive():
        return self.fetch_directive()

    # Is it the document start?
    if ch == u'-' and self.check_document_start():
        return self.fetch_document_start()

    # Is it the document end?
    if ch == u'.' and self.check_document_end():
        return self.fetch_document_end()

    # TODO: support for BOM within a stream.
    #if ch == u'\uFEFF':
    #    return self.fetch_bom()     <-- issue BOMToken

    # Note: the order of the following checks is NOT significant.

    # Is it the flow sequence start indicator?
    if ch == u'[':
        return self.fetch_flow_sequence_start()

    # Is it the flow mapping start indicator?
    if ch == u'{':
        return self.fetch_flow_mapping_start()

    # Is it the flow sequence end indicator?
    if ch == u']':
        return self.fetch_flow_sequence_end()

    # Is it the flow mapping end indicator?
    if ch == u'}':
        return self.fetch_flow_mapping_end()

    # Is it the flow entry indicator?
    if ch == u',':
        return self.fetch_flow_entry()

    # Is it the block entry indicator?
    if ch == u'-' and self.check_block_entry():
        return self.fetch_block_entry()

    # Is it the key indicator?
    if ch == u'?' and self.check_key():
        return self.fetch_key()

    # Is it the value indicator?
    if ch == u':' and self.check_value():
        return self.fetch_value()

    # Is it an alias?
    if ch == u'*':
        return self.fetch_alias()

    # Is it an anchor?
    if ch == u'&':
        return self.fetch_anchor()

    # Is it a tag?
    if ch == u'!':
        return self.fetch_tag()

    # Is it a literal scalar?
    if ch == u'|' and not self.flow_level:
        return self.fetch_literal()

    # Is it a folded scalar?
    if ch == u'>' and not self.flow_level:
        return self.fetch_folded()

    # Is it a single quoted scalar?
    if ch == u'\'':
        return self.fetch_single()

    # Is it a double quoted scalar?
    if ch == u'\"':
        return self.fetch_double()

    # It must be a plain scalar then.
    if self.check_plain():
        return self.fetch_plain()

    # No? It's an error.  Let's produce a nice error message.
    raise ScannerError("while scanning for the next token", None,
            "found character %r that cannot start any token"
            % ch.encode('utf-8'), self.get_mark())
258
+
259
+ # Simple keys treatment.
260
+
261
def next_possible_simple_key(self):
    """Return the token number of the nearest pending simple key.

    Returns None when no simple key candidate is being tracked.
    """
    if not self.possible_simple_keys:
        return None
    return min(record.token_number
            for record in self.possible_simple_keys.values())
275
+
276
def stale_possible_simple_keys(self):
    """Drop pending simple keys that can no longer be valid.

    According to the YAML specification, simple keys
    - should be limited to a single line,
    - should be no longer than 1024 characters.
    Raises ScannerError if a *required* key goes stale (a ':' was never
    found where one was mandatory).

    Fix: iterate over a snapshot of the keys (`list(...)`) instead of the
    live key view — deleting entries while iterating the view raises
    RuntimeError on Python 3 and is fragile in general.
    """
    for level in list(self.possible_simple_keys):
        key = self.possible_simple_keys[level]
        if key.line != self.line \
                or self.index-key.index > 1024:
            if key.required:
                raise ScannerError("while scanning a simple key", key.mark,
                        "could not find expected ':'", self.get_mark())
            del self.possible_simple_keys[level]
291
+
292
def save_possible_simple_key(self):
    """Record that a simple key could start at the current position.

    Called before fetching ALIAS, ANCHOR, TAG, SCALAR(flow), '[' and '{'.
    """
    # A simple key is *required* exactly at the current block indentation
    # column (block context only).
    required = not self.flow_level and self.indent == self.column

    if not self.allow_simple_key:
        return

    # Replace any previously saved candidate at this flow level, then
    # remember the token number and stream position of this one.
    self.remove_possible_simple_key()
    token_number = self.tokens_taken + len(self.tokens)
    self.possible_simple_keys[self.flow_level] = SimpleKey(
            token_number, required,
            self.index, self.line, self.column, self.get_mark())
308
+
309
def remove_possible_simple_key(self):
    """Discard the pending simple key at the current flow level, if any.

    Raises ScannerError if that key was required (its ':' never appeared).
    """
    try:
        key = self.possible_simple_keys[self.flow_level]
    except KeyError:
        return

    if key.required:
        raise ScannerError("while scanning a simple key", key.mark,
                "could not find expected ':'", self.get_mark())

    del self.possible_simple_keys[self.flow_level]
319
+
320
+ # Indentation functions.
321
+
322
def unwind_indent(self, column):
    # Pop indentation levels greater than `column`, emitting a BLOCK-END
    # token for each closed level.  No-op in flow context.

    ## In flow context, tokens should respect indentation.
    ## Actually the condition should be `self.indent >= column` according to
    ## the spec. But this condition will prohibit intuitively correct
    ## constructions such as
    ## key : {
    ## }
    #if self.flow_level and self.indent > column:
    #    raise ScannerError(None, None,
    #            "invalid intendation or unclosed '[' or '{'",
    #            self.get_mark())

    # In the flow context, indentation is ignored.  We make the scanner
    # less restrictive than the specification requires.
    if self.flow_level:
        return

    # In block context, we may need to issue the BLOCK-END tokens.
    while self.indent > column:
        mark = self.get_mark()
        self.indent = self.indents.pop()
        self.tokens.append(BlockEndToken(mark, mark))
345
+
346
def add_indent(self, column):
    # Open a new indentation level at `column` if it is deeper than the
    # current one.  Returns True when a level was actually pushed.
    if self.indent >= column:
        return False
    self.indents.append(self.indent)
    self.indent = column
    return True
353
+
354
+ # Fetchers.
355
+
356
def fetch_stream_start(self):
    # STREAM-START is always the very first token of the stream.
    mark = self.get_mark()
    self.tokens.append(StreamStartToken(mark, mark,
            encoding=self.encoding))
366
+
367
+
368
def fetch_stream_end(self):
    # Close all open block collections.
    self.unwind_indent(-1)
    # No simple key may span the end of the stream.
    self.remove_possible_simple_key()
    self.allow_simple_key = False
    self.possible_simple_keys = {}
    # Emit STREAM-END and stop scanning.
    mark = self.get_mark()
    self.tokens.append(StreamEndToken(mark, mark))
    self.done = True
386
+
387
def fetch_directive(self):
    # A '%' directive terminates any open block collections.
    self.unwind_indent(-1)
    # No simple key may precede or follow a directive.
    self.remove_possible_simple_key()
    self.allow_simple_key = False
    # Scan and emit DIRECTIVE.
    self.tokens.append(self.scan_directive())
398
+
399
def fetch_document_start(self):
    # '---' document marker.
    self.fetch_document_indicator(DocumentStartToken)
401
+
402
def fetch_document_end(self):
    # '...' document marker.
    self.fetch_document_indicator(DocumentEndToken)
404
+
405
def fetch_document_indicator(self, TokenClass):
    # '---'/'...' always sit at column 0, so reset indentation first.
    self.unwind_indent(-1)
    # No block collection (and hence no simple key) may follow the
    # document marker.
    self.remove_possible_simple_key()
    self.allow_simple_key = False
    # Consume the three marker characters and emit the token.
    start_mark = self.get_mark()
    self.forward(3)
    end_mark = self.get_mark()
    self.tokens.append(TokenClass(start_mark, end_mark))
420
+
421
def fetch_flow_sequence_start(self):
    # '[' opens a flow sequence.
    self.fetch_flow_collection_start(FlowSequenceStartToken)
423
+
424
def fetch_flow_mapping_start(self):
    # '{' opens a flow mapping.
    self.fetch_flow_collection_start(FlowMappingStartToken)
426
+
427
def fetch_flow_collection_start(self, TokenClass):
    # '[' or '{' may itself act as a simple key.
    self.save_possible_simple_key()
    # Enter a deeper flow level.
    self.flow_level += 1
    # A simple key may directly follow the opening bracket.
    self.allow_simple_key = True
    # Emit FLOW-SEQUENCE-START / FLOW-MAPPING-START.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(TokenClass(start_mark, end_mark))
443
+
444
def fetch_flow_sequence_end(self):
    # ']' closes a flow sequence.
    self.fetch_flow_collection_end(FlowSequenceEndToken)
446
+
447
def fetch_flow_mapping_end(self):
    # '}' closes a flow mapping.
    self.fetch_flow_collection_end(FlowMappingEndToken)
449
+
450
def fetch_flow_collection_end(self, TokenClass):
    # Discard any simple-key candidate at this level.
    self.remove_possible_simple_key()
    # Leave the flow level.
    self.flow_level -= 1
    # No simple key may follow ']' or '}'.
    self.allow_simple_key = False
    # Emit FLOW-SEQUENCE-END / FLOW-MAPPING-END.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(TokenClass(start_mark, end_mark))
466
+
467
def fetch_flow_entry(self):
    # ',' separates flow entries; a simple key may follow it.
    self.allow_simple_key = True
    # Discard any simple-key candidate at this level.
    self.remove_possible_simple_key()
    # Emit FLOW-ENTRY.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(FlowEntryToken(start_mark, end_mark))
480
+
481
def fetch_block_entry(self):
    # '-' sequence-entry marker.
    if not self.flow_level:
        # In block context an entry may appear only where a simple key
        # could start.
        if not self.allow_simple_key:
            raise ScannerError(None, None,
                    "sequence entries are not allowed here",
                    self.get_mark())
        # A first entry at a deeper indent opens a new block sequence.
        if self.add_indent(self.column):
            mark = self.get_mark()
            self.tokens.append(BlockSequenceStartToken(mark, mark))
    # In flow context a block entry is an error, but the parser is in a
    # better position to report it, so nothing is done here.

    # A simple key may follow '-'.
    self.allow_simple_key = True
    # Discard any simple-key candidate at this level.
    self.remove_possible_simple_key()
    # Emit BLOCK-ENTRY.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(BlockEntryToken(start_mark, end_mark))
513
+
514
def fetch_key(self):
    # '?' complex-key marker.
    if not self.flow_level:
        # In block context a key (simple or not) may start only where a
        # simple key could.
        if not self.allow_simple_key:
            raise ScannerError(None, None,
                    "mapping keys are not allowed here",
                    self.get_mark())
        # A key at a deeper indent opens a new block mapping.
        if self.add_indent(self.column):
            mark = self.get_mark()
            self.tokens.append(BlockMappingStartToken(mark, mark))
    # After '?' a simple key is possible only in block context.
    self.allow_simple_key = not self.flow_level
    # Discard any simple-key candidate at this level.
    self.remove_possible_simple_key()
    # Emit KEY.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(KeyToken(start_mark, end_mark))
541
+
542
def fetch_value(self):
    # ':' value marker.
    if self.flow_level in self.possible_simple_keys:
        # A simple-key candidate was pending: retroactively insert a KEY
        # token in front of it.
        key = self.possible_simple_keys[self.flow_level]
        del self.possible_simple_keys[self.flow_level]
        self.tokens.insert(key.token_number-self.tokens_taken,
                KeyToken(key.mark, key.mark))

        # The key may also open a new block mapping.
        if not self.flow_level:
            if self.add_indent(key.column):
                self.tokens.insert(key.token_number-self.tokens_taken,
                        BlockMappingStartToken(key.mark, key.mark))

        # Two simple keys cannot be adjacent.
        self.allow_simple_key = False

    else:
        # No candidate: this ':' belongs to a complex key.

        # Block context check (the parser would also catch this): a
        # complex value may start exactly where a simple key could.
        if not self.flow_level:
            if not self.allow_simple_key:
                raise ScannerError(None, None,
                        "mapping values are not allowed here",
                        self.get_mark())

        # The value may open a new block mapping; if that is invalid the
        # parser reports it later.
        if not self.flow_level:
            if self.add_indent(self.column):
                mark = self.get_mark()
                self.tokens.append(BlockMappingStartToken(mark, mark))

        # In block context a simple key may follow ':'.
        self.allow_simple_key = not self.flow_level

        # Discard any stale candidate at this level.
        self.remove_possible_simple_key()

    # Emit VALUE.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(ValueToken(start_mark, end_mark))
597
+
598
def fetch_alias(self):
    # '*alias' may itself act as a simple key...
    self.save_possible_simple_key()
    # ...but no simple key may follow it.
    self.allow_simple_key = False
    # Scan and emit ALIAS.
    self.tokens.append(self.scan_anchor(AliasToken))
608
+
609
def fetch_anchor(self):
    # '&anchor' may start a simple key...
    self.save_possible_simple_key()
    # ...but no simple key may follow it.
    self.allow_simple_key = False
    # Scan and emit ANCHOR.
    self.tokens.append(self.scan_anchor(AnchorToken))
619
+
620
def fetch_tag(self):
    # '!tag' may start a simple key...
    self.save_possible_simple_key()
    # ...but no simple key may follow it.
    self.allow_simple_key = False
    # Scan and emit TAG.
    self.tokens.append(self.scan_tag())
630
+
631
def fetch_literal(self):
    # '|' introduces a literal block scalar.
    self.fetch_block_scalar(style='|')
633
+
634
def fetch_folded(self):
    # '>' introduces a folded block scalar.
    self.fetch_block_scalar(style='>')
636
+
637
def fetch_block_scalar(self, style):
    # A simple key may follow a block scalar.
    self.allow_simple_key = True
    # Discard any simple-key candidate at this level.
    self.remove_possible_simple_key()
    # Scan and emit SCALAR.
    self.tokens.append(self.scan_block_scalar(style))
647
+
648
def fetch_single(self):
    # Single-quoted flow scalar.
    self.fetch_flow_scalar(style='\'')
650
+
651
def fetch_double(self):
    # Double-quoted flow scalar.
    self.fetch_flow_scalar(style='"')
653
+
654
def fetch_flow_scalar(self, style):
    # A quoted scalar may be a simple key...
    self.save_possible_simple_key()
    # ...but no simple key may follow it.
    self.allow_simple_key = False
    # Scan and emit SCALAR.
    self.tokens.append(self.scan_flow_scalar(style))
664
+
665
def fetch_plain(self):
    # A plain scalar may be a simple key...
    self.save_possible_simple_key()
    # ...but none may follow it.  scan_plain() flips the flag back on
    # when the scalar ends at the beginning of a line.
    self.allow_simple_key = False
    # Scan and emit SCALAR (may change `allow_simple_key`).
    self.tokens.append(self.scan_plain())
677
+
678
+ # Checkers.
679
+
680
def check_directive(self):
    # DIRECTIVE starts with '%' (already verified by the caller) and must
    # sit at column 0.  Returns True, or falls through to None.
    if self.column == 0:
        return True
686
+
687
def check_document_start(self):
    # DOCUMENT-START: '---' at column 0 followed by space or line end.
    # Returns True, or falls through to None.
    if self.column == 0 and self.prefix(3) == u'---' \
            and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
        return True
694
+
695
def check_document_end(self):
    # DOCUMENT-END: '...' at column 0 followed by space or line end.
    # Returns True, or falls through to None.
    if self.column == 0 and self.prefix(3) == u'...' \
            and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
        return True
702
+
703
def check_block_entry(self):
    # BLOCK-ENTRY: '-' followed by space or line end.
    return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029'
707
+
708
def check_key(self):
    # In flow context a bare '?' is always a KEY; in block context it
    # must be followed by space or line end.
    if self.flow_level:
        return True
    return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029'
717
+
718
def check_value(self):
    # In flow context a bare ':' is always a VALUE; in block context it
    # must be followed by space or line end.
    if self.flow_level:
        return True
    return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029'
727
+
728
def check_plain(self):
    # A plain scalar may start with any non-space character except the
    # indicators '-?:,[]{}#&*!|>\'"%@`'.  It MAY start with '-', '?' or
    # ':' when followed by a non-space character ('?' and ':' only in
    # block context, to keep the flow context space-independent).
    ch = self.peek()
    if ch not in u'\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`':
        return True
    return (self.peek(1) not in u'\0 \t\r\n\x85\u2028\u2029'
            and (ch == u'-' or (not self.flow_level and ch in u'?:')))
746
+
747
+ # Scanners.
748
+
749
def scan_to_next_token(self):
    # Skip spaces, comments and line breaks until the next token.  A line
    # break in block context re-enables simple keys.  A byte order mark
    # is stripped only at the very start of the stream; a BOM inside the
    # stream is (contrary to the spec) treated as document content.
    #
    # TODO (upstream note): tab handling should be stricter — tabs must
    # not precede block-structure tokens (BLOCK-SEQUENCE-START,
    # BLOCK-MAPPING-START, BLOCK-END, KEY/VALUE(block), BLOCK-ENTRY).
    if self.index == 0 and self.peek() == u'\uFEFF':
        self.forward()
    found = False
    while not found:
        while self.peek() == u' ':
            self.forward()
        if self.peek() == u'#':
            while self.peek() not in u'\0\r\n\x85\u2028\u2029':
                self.forward()
        if self.scan_line_break():
            if not self.flow_level:
                self.allow_simple_key = True
        else:
            found = True
783
+
784
def scan_directive(self):
    # '%' NAME [arguments] — see the YAML specification.
    start_mark = self.get_mark()
    self.forward()
    name = self.scan_directive_name(start_mark)
    value = None
    if name == u'YAML':
        value = self.scan_yaml_directive_value(start_mark)
        end_mark = self.get_mark()
    elif name == u'TAG':
        value = self.scan_tag_directive_value(start_mark)
        end_mark = self.get_mark()
    else:
        # Unknown directives are skipped up to the end of the line.
        end_mark = self.get_mark()
        while self.peek() not in u'\0\r\n\x85\u2028\u2029':
            self.forward()
    self.scan_directive_ignored_line(start_mark)
    return DirectiveToken(name, value, start_mark, end_mark)
802
+
803
def scan_directive_name(self, start_mark):
    # A directive name is a non-empty run of [0-9A-Za-z_-], terminated by
    # a space or line end.
    length = 0
    ch = self.peek(length)
    while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
            or ch in u'-_':
        length += 1
        ch = self.peek(length)
    if not length:
        raise ScannerError("while scanning a directive", start_mark,
                "expected alphabetic or numeric character, but found %r"
                % ch.encode('utf-8'), self.get_mark())
    value = self.prefix(length)
    self.forward(length)
    ch = self.peek()
    if ch not in u'\0 \r\n\x85\u2028\u2029':
        raise ScannerError("while scanning a directive", start_mark,
                "expected alphabetic or numeric character, but found %r"
                % ch.encode('utf-8'), self.get_mark())
    return value
823
+
824
def scan_yaml_directive_value(self, start_mark):
    # %YAML <major>.<minor> — returns (major, minor) as ints.
    while self.peek() == u' ':
        self.forward()
    major = self.scan_yaml_directive_number(start_mark)
    if self.peek() != '.':
        raise ScannerError("while scanning a directive", start_mark,
                "expected a digit or '.', but found %r"
                % self.peek().encode('utf-8'),
                self.get_mark())
    self.forward()
    minor = self.scan_yaml_directive_number(start_mark)
    if self.peek() not in u'\0 \r\n\x85\u2028\u2029':
        raise ScannerError("while scanning a directive", start_mark,
                "expected a digit or ' ', but found %r"
                % self.peek().encode('utf-8'),
                self.get_mark())
    return (major, minor)
842
+
843
def scan_yaml_directive_number(self, start_mark):
    # A non-empty run of digits, returned as an int.
    ch = self.peek()
    if not (u'0' <= ch <= u'9'):
        raise ScannerError("while scanning a directive", start_mark,
                "expected a digit, but found %r" % ch.encode('utf-8'),
                self.get_mark())
    length = 0
    while u'0' <= self.peek(length) <= u'9':
        length += 1
    value = int(self.prefix(length))
    self.forward(length)
    return value
856
+
857
def scan_tag_directive_value(self, start_mark):
    # %TAG <handle> <prefix> — returns the (handle, prefix) pair.
    while self.peek() == u' ':
        self.forward()
    handle = self.scan_tag_directive_handle(start_mark)
    while self.peek() == u' ':
        self.forward()
    prefix = self.scan_tag_directive_prefix(start_mark)
    return (handle, prefix)
866
+
867
def scan_tag_directive_handle(self, start_mark):
    # The handle of a %TAG directive must be followed by a space.
    value = self.scan_tag_handle('directive', start_mark)
    ch = self.peek()
    if ch != u' ':
        raise ScannerError("while scanning a directive", start_mark,
                "expected ' ', but found %r" % ch.encode('utf-8'),
                self.get_mark())
    return value
876
+
877
def scan_tag_directive_prefix(self, start_mark):
    # The prefix of a %TAG directive must end at a space or line end.
    value = self.scan_tag_uri('directive', start_mark)
    ch = self.peek()
    if ch not in u'\0 \r\n\x85\u2028\u2029':
        raise ScannerError("while scanning a directive", start_mark,
                "expected ' ', but found %r" % ch.encode('utf-8'),
                self.get_mark())
    return value
886
+
887
def scan_directive_ignored_line(self, start_mark):
    # After a directive only blanks and an optional comment may remain on
    # the line; consume them including the trailing line break.
    while self.peek() == u' ':
        self.forward()
    if self.peek() == u'#':
        while self.peek() not in u'\0\r\n\x85\u2028\u2029':
            self.forward()
    ch = self.peek()
    if ch not in u'\0\r\n\x85\u2028\u2029':
        raise ScannerError("while scanning a directive", start_mark,
                "expected a comment or a line break, but found %r"
                % ch.encode('utf-8'), self.get_mark())
    self.scan_line_break()
900
+
901
def scan_anchor(self, TokenClass):
    # Anchors and aliases are restricted to [0-9A-Za-z_-].  The spec is
    # laxer, but e.g. "[ *alias, value ]" would otherwise be ambiguous
    # (is ',' part of the alias name or a flow separator?).
    start_mark = self.get_mark()
    indicator = self.peek()
    if indicator == u'*':
        name = 'alias'
    else:
        name = 'anchor'
    self.forward()
    length = 0
    ch = self.peek(length)
    while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
            or ch in u'-_':
        length += 1
        ch = self.peek(length)
    if not length:
        raise ScannerError("while scanning an %s" % name, start_mark,
                "expected alphabetic or numeric character, but found %r"
                % ch.encode('utf-8'), self.get_mark())
    value = self.prefix(length)
    self.forward(length)
    ch = self.peek()
    if ch not in u'\0 \t\r\n\x85\u2028\u2029?:,]}%@`':
        raise ScannerError("while scanning an %s" % name, start_mark,
                "expected alphabetic or numeric character, but found %r"
                % ch.encode('utf-8'), self.get_mark())
    end_mark = self.get_mark()
    return TokenClass(value, start_mark, end_mark)
936
+
937
def scan_tag(self):
    # TAG token: verbatim '!<uri>', bare '!', '!suffix' or '!handle!suffix'.
    start_mark = self.get_mark()
    ch = self.peek(1)
    if ch == u'<':
        # Verbatim tag: !<tag:example.com,2002:type>
        handle = None
        self.forward(2)
        suffix = self.scan_tag_uri('tag', start_mark)
        if self.peek() != u'>':
            raise ScannerError("while parsing a tag", start_mark,
                    "expected '>', but found %r" % self.peek().encode('utf-8'),
                    self.get_mark())
        self.forward()
    elif ch in u'\0 \t\r\n\x85\u2028\u2029':
        # A lone '!' is the non-specific tag.
        handle = None
        suffix = u'!'
        self.forward()
    else:
        # Look ahead for a second '!' to decide between a named handle
        # ('!handle!suffix') and the primary handle ('!suffix').
        length = 1
        use_handle = False
        while ch not in u'\0 \r\n\x85\u2028\u2029':
            if ch == u'!':
                use_handle = True
                break
            length += 1
            ch = self.peek(length)
        # Fix: the original pre-assigned `handle = u'!'` here and then
        # unconditionally overwrote it in both branches below; the dead
        # store has been removed.
        if use_handle:
            handle = self.scan_tag_handle('tag', start_mark)
        else:
            handle = u'!'
            self.forward()
        suffix = self.scan_tag_uri('tag', start_mark)
    ch = self.peek()
    if ch not in u'\0 \r\n\x85\u2028\u2029':
        raise ScannerError("while scanning a tag", start_mark,
                "expected ' ', but found %r" % ch.encode('utf-8'),
                self.get_mark())
    value = (handle, suffix)
    end_mark = self.get_mark()
    return TagToken(value, start_mark, end_mark)
978
+
979
def scan_block_scalar(self, style):
    # Block scalar: '|' (literal) or '>' (folded) with an optional
    # chomping/indentation header.
    folded = (style == '>')

    chunks = []
    start_mark = self.get_mark()

    # Header: the indicator, optional chomping/indentation indicators,
    # then an ignored rest-of-line.
    self.forward()
    chomping, increment = self.scan_block_scalar_indicators(start_mark)
    self.scan_block_scalar_ignored_line(start_mark)

    # Determine the content indentation and skip to the first non-empty
    # line.
    min_indent = self.indent+1
    if min_indent < 1:
        min_indent = 1
    if increment is None:
        breaks, max_indent, end_mark = self.scan_block_scalar_indentation()
        indent = max(min_indent, max_indent)
    else:
        indent = min_indent+increment-1
        breaks, end_mark = self.scan_block_scalar_breaks(indent)
    line_break = u''

    # Scan the body line by line.
    while self.column == indent and self.peek() != u'\0':
        chunks.extend(breaks)
        leading_non_space = self.peek() not in u' \t'
        length = 0
        while self.peek(length) not in u'\0\r\n\x85\u2028\u2029':
            length += 1
        chunks.append(self.prefix(length))
        self.forward(length)
        line_break = self.scan_line_break()
        breaks, end_mark = self.scan_block_scalar_breaks(indent)
        if self.column == indent and self.peek() != u'\0':
            # Folding rules are ambiguous; this follows the specification
            # text: a single '\n' between two more-indented-free lines
            # folds to a single space.  (Clark Evans's interpretation,
            # seen in the spec examples, differs slightly.)
            if folded and line_break == u'\n' \
                    and leading_non_space and self.peek() not in u' \t':
                if not breaks:
                    chunks.append(u' ')
            else:
                chunks.append(line_break)
        else:
            break

    # Apply chomping: None = clip (keep final break), True = keep all
    # trailing breaks, False = strip them.
    if chomping is not False:
        chunks.append(line_break)
    if chomping is True:
        chunks.extend(breaks)

    return ScalarToken(u''.join(chunks), False, start_mark, end_mark,
            style)
1054
+
1055
def scan_block_scalar_indicators(self, start_mark):
    # Parse the optional chomping ('+'/'-') and indentation (1-9)
    # indicators of a block scalar header, in either order.
    chomping = None
    increment = None
    ch = self.peek()
    if ch in u'+-':
        chomping = (ch == '+')
        self.forward()
        ch = self.peek()
        if ch in u'0123456789':
            increment = int(ch)
            if increment == 0:
                raise ScannerError("while scanning a block scalar", start_mark,
                        "expected indentation indicator in the range 1-9, but found 0",
                        self.get_mark())
            self.forward()
    elif ch in u'0123456789':
        increment = int(ch)
        if increment == 0:
            raise ScannerError("while scanning a block scalar", start_mark,
                    "expected indentation indicator in the range 1-9, but found 0",
                    self.get_mark())
        self.forward()
        ch = self.peek()
        if ch in u'+-':
            chomping = (ch == '+')
            self.forward()
    ch = self.peek()
    if ch not in u'\0 \r\n\x85\u2028\u2029':
        raise ScannerError("while scanning a block scalar", start_mark,
                "expected chomping or indentation indicators, but found %r"
                % ch.encode('utf-8'), self.get_mark())
    return chomping, increment
1094
+
1095
def scan_block_scalar_ignored_line(self, start_mark):
    # After a block scalar header only blanks and an optional comment may
    # remain on the line; consume them including the line break.
    while self.peek() == u' ':
        self.forward()
    if self.peek() == u'#':
        while self.peek() not in u'\0\r\n\x85\u2028\u2029':
            self.forward()
    ch = self.peek()
    if ch not in u'\0\r\n\x85\u2028\u2029':
        raise ScannerError("while scanning a block scalar", start_mark,
                "expected a comment or a line break, but found %r"
                % ch.encode('utf-8'), self.get_mark())
    self.scan_line_break()
1108
+
1109
def scan_block_scalar_indentation(self):
    # Skip leading blanks and empty lines of a block scalar, collecting
    # the line breaks and recording the deepest indentation seen.
    chunks = []
    max_indent = 0
    end_mark = self.get_mark()
    while self.peek() in u' \r\n\x85\u2028\u2029':
        if self.peek() != u' ':
            chunks.append(self.scan_line_break())
            end_mark = self.get_mark()
        else:
            self.forward()
            if self.column > max_indent:
                max_indent = self.column
    return chunks, max_indent, end_mark
1123
+
1124
def scan_block_scalar_breaks(self, indent):
    # Consume trailing line breaks of a block scalar, skipping up to
    # `indent` leading spaces on each line.
    chunks = []
    end_mark = self.get_mark()
    while self.column < indent and self.peek() == u' ':
        self.forward()
    while self.peek() in u'\r\n\x85\u2028\u2029':
        chunks.append(self.scan_line_break())
        end_mark = self.get_mark()
        while self.column < indent and self.peek() == u' ':
            self.forward()
    return chunks, end_mark
1136
+
1137
def scan_flow_scalar(self, style):
    # Quoted scalar.  Indentation rules are deliberately relaxed: the
    # quotes delimit the scalar unambiguously, so only embedded document
    # separators are rejected (in the helpers).
    double = (style == '"')
    chunks = []
    start_mark = self.get_mark()
    quote = self.peek()
    self.forward()
    chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
    while self.peek() != quote:
        chunks.extend(self.scan_flow_scalar_spaces(double, start_mark))
        chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
    self.forward()
    end_mark = self.get_mark()
    return ScalarToken(u''.join(chunks), False, start_mark, end_mark,
            style)
1160
+
1161
# Single-character escape sequences recognized in double-quoted scalars,
# mapped to the character they denote.
ESCAPE_REPLACEMENTS = {
    u'0':  u'\0',      # null
    u'a':  u'\x07',    # bell
    u'b':  u'\x08',    # backspace
    u't':  u'\x09',    # horizontal tab
    u'\t': u'\x09',    # a literal tab after the backslash
    u'n':  u'\x0A',    # line feed
    u'v':  u'\x0B',    # vertical tab
    u'f':  u'\x0C',    # form feed
    u'r':  u'\x0D',    # carriage return
    u'e':  u'\x1B',    # escape
    u' ':  u'\x20',    # space
    u'\"': u'\"',      # double quote
    u'\\': u'\\',      # backslash
    u'N':  u'\x85',    # next line
    u'_':  u'\xA0',    # non-breaking space
    u'L':  u'\u2028',  # line separator
    u'P':  u'\u2029',  # paragraph separator
}
1180
+
1181
# Numeric escapes: '\x', '\u' and '\U' are followed by this many hex
# digits encoding a code point.
ESCAPE_CODES = {
    u'x': 2,
    u'u': 4,
    u'U': 8,
}
1186
+
1187
def scan_flow_scalar_non_spaces(self, double, start_mark):
    # Collect the non-blank portion of a quoted scalar, resolving escape
    # sequences in double-quoted scalars.
    chunks = []
    while True:
        length = 0
        while self.peek(length) not in u'\'\"\\\0 \t\r\n\x85\u2028\u2029':
            length += 1
        if length:
            chunks.append(self.prefix(length))
            self.forward(length)
        ch = self.peek()
        if not double and ch == u'\'' and self.peek(1) == u'\'':
            # "''" inside a single-quoted scalar is an escaped quote.
            chunks.append(u'\'')
            self.forward(2)
        elif (double and ch == u'\'') or (not double and ch in u'\"\\'):
            # The other style's quote / backslash is literal here.
            chunks.append(ch)
            self.forward()
        elif double and ch == u'\\':
            self.forward()
            ch = self.peek()
            if ch in self.ESCAPE_REPLACEMENTS:
                chunks.append(self.ESCAPE_REPLACEMENTS[ch])
                self.forward()
            elif ch in self.ESCAPE_CODES:
                # \xXX, \uXXXX or \UXXXXXXXX numeric escapes.
                length = self.ESCAPE_CODES[ch]
                self.forward()
                for k in range(length):
                    if self.peek(k) not in u'0123456789ABCDEFabcdef':
                        raise ScannerError("while scanning a double-quoted scalar", start_mark,
                                "expected escape sequence of %d hexdecimal numbers, but found %r" %
                                    (length, self.peek(k).encode('utf-8')), self.get_mark())
                code = int(self.prefix(length), 16)
                # NOTE: `unichr` — this vendored module targets Python 2.
                chunks.append(unichr(code))
                self.forward(length)
            elif ch in u'\r\n\x85\u2028\u2029':
                # An escaped line break folds away entirely.
                self.scan_line_break()
                chunks.extend(self.scan_flow_scalar_breaks(double, start_mark))
            else:
                raise ScannerError("while scanning a double-quoted scalar", start_mark,
                        "found unknown escape character %r" % ch.encode('utf-8'), self.get_mark())
        else:
            return chunks
1229
+
1230
def scan_flow_scalar_spaces(self, double, start_mark):
    # Collect a run of blanks / line breaks inside a quoted scalar; a
    # single '\n' folds to one space, other breaks are kept.
    chunks = []
    length = 0
    while self.peek(length) in u' \t':
        length += 1
    whitespaces = self.prefix(length)
    self.forward(length)
    ch = self.peek()
    if ch == u'\0':
        raise ScannerError("while scanning a quoted scalar", start_mark,
                "found unexpected end of stream", self.get_mark())
    elif ch in u'\r\n\x85\u2028\u2029':
        line_break = self.scan_line_break()
        breaks = self.scan_flow_scalar_breaks(double, start_mark)
        if line_break != u'\n':
            chunks.append(line_break)
        elif not breaks:
            chunks.append(u' ')
        chunks.extend(breaks)
    else:
        chunks.append(whitespaces)
    return chunks
1253
+
1254
def scan_flow_scalar_breaks(self, double, start_mark):
    # Consume empty lines inside a quoted scalar.  Instead of checking
    # indentation, a document separator here means the closing quote is
    # missing.
    chunks = []
    while True:
        prefix = self.prefix(3)
        if (prefix == u'---' or prefix == u'...') \
                and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
            raise ScannerError("while scanning a quoted scalar", start_mark,
                    "found unexpected document separator", self.get_mark())
        while self.peek() in u' \t':
            self.forward()
        if self.peek() in u'\r\n\x85\u2028\u2029':
            chunks.append(self.scan_line_break())
        else:
            return chunks
1271
+
1272
def scan_plain(self):
    # Plain (unquoted) scalar.  Additional restriction beyond the spec:
    # in flow context it may not contain ',', ':' or '?'.  Keeps
    # `allow_simple_key` up to date; indentation rules are relaxed for
    # the flow context.
    chunks = []
    start_mark = self.get_mark()
    end_mark = start_mark
    indent = self.indent+1
    # Zero indentation is permitted for scalars; document separators at
    # the start of a line are checked in scan_plain_spaces() instead.
    spaces = []
    while True:
        length = 0
        if self.peek() == u'#':
            break
        while True:
            ch = self.peek(length)
            if ch in u'\0 \t\r\n\x85\u2028\u2029' \
                    or (not self.flow_level and ch == u':' and
                        self.peek(length+1) in u'\0 \t\r\n\x85\u2028\u2029') \
                    or (self.flow_level and ch in u',:?[]{}'):
                break
            length += 1
        # ':' in flow context is ambiguous; reject it explicitly.
        if (self.flow_level and ch == u':'
                and self.peek(length+1) not in u'\0 \t\r\n\x85\u2028\u2029,[]{}'):
            self.forward(length)
            raise ScannerError("while scanning a plain scalar", start_mark,
                "found unexpected ':'", self.get_mark(),
                "Please check http://pyyaml.org/wiki/YAMLColonInFlowContext for details.")
        if length == 0:
            break
        self.allow_simple_key = False
        chunks.extend(spaces)
        chunks.append(self.prefix(length))
        self.forward(length)
        end_mark = self.get_mark()
        spaces = self.scan_plain_spaces(indent, start_mark)
        if not spaces or self.peek() == u'#' \
                or (not self.flow_level and self.column < indent):
            break
    return ScalarToken(u''.join(chunks), True, start_mark, end_mark)
1318
+
1319
    def scan_plain_spaces(self, indent, start_mark):
        # Scan the whitespace/line-break separator after a plain-scalar run.
        # Returns the folded whitespace chunks to insert before the next run,
        # or None when a document separator is found (ends the scalar).
        # See the specification for details.
        # The specification is really confusing about tabs in plain scalars.
        # We just forbid them completely. Do not use tabs in YAML!
        chunks = []
        length = 0
        while self.peek(length) in u' ':
            length += 1
        whitespaces = self.prefix(length)
        self.forward(length)
        ch = self.peek()
        if ch in u'\r\n\x85\u2028\u2029':
            line_break = self.scan_line_break()
            # After a line break a simple key may start again.
            self.allow_simple_key = True
            prefix = self.prefix(3)
            if (prefix == u'---' or prefix == u'...')   \
                    and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
                # Bare return -> None: signals the caller to stop the scalar.
                return
            breaks = []
            while self.peek() in u' \r\n\x85\u2028\u2029':
                # Note: ' ' (a str) compares equal to u' ' under Python 2.
                if self.peek() == ' ':
                    self.forward()
                else:
                    breaks.append(self.scan_line_break())
                    prefix = self.prefix(3)
                    if (prefix == u'---' or prefix == u'...')   \
                            and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
                        return
            # Line folding: a single '\n' becomes a space; extra breaks and
            # non-'\n' breaks are preserved literally.
            if line_break != u'\n':
                chunks.append(line_break)
            elif not breaks:
                chunks.append(u' ')
            chunks.extend(breaks)
        elif whitespaces:
            chunks.append(whitespaces)
        return chunks
1355
+
1356
+ def scan_tag_handle(self, name, start_mark):
1357
+ # See the specification for details.
1358
+ # For some strange reasons, the specification does not allow '_' in
1359
+ # tag handles. I have allowed it anyway.
1360
+ ch = self.peek()
1361
+ if ch != u'!':
1362
+ raise ScannerError("while scanning a %s" % name, start_mark,
1363
+ "expected '!', but found %r" % ch.encode('utf-8'),
1364
+ self.get_mark())
1365
+ length = 1
1366
+ ch = self.peek(length)
1367
+ if ch != u' ':
1368
+ while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
1369
+ or ch in u'-_':
1370
+ length += 1
1371
+ ch = self.peek(length)
1372
+ if ch != u'!':
1373
+ self.forward(length)
1374
+ raise ScannerError("while scanning a %s" % name, start_mark,
1375
+ "expected '!', but found %r" % ch.encode('utf-8'),
1376
+ self.get_mark())
1377
+ length += 1
1378
+ value = self.prefix(length)
1379
+ self.forward(length)
1380
+ return value
1381
+
1382
+ def scan_tag_uri(self, name, start_mark):
1383
+ # See the specification for details.
1384
+ # Note: we do not check if URI is well-formed.
1385
+ chunks = []
1386
+ length = 0
1387
+ ch = self.peek(length)
1388
+ while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
1389
+ or ch in u'-;/?:@&=+$,_.!~*\'()[]%':
1390
+ if ch == u'%':
1391
+ chunks.append(self.prefix(length))
1392
+ self.forward(length)
1393
+ length = 0
1394
+ chunks.append(self.scan_uri_escapes(name, start_mark))
1395
+ else:
1396
+ length += 1
1397
+ ch = self.peek(length)
1398
+ if length:
1399
+ chunks.append(self.prefix(length))
1400
+ self.forward(length)
1401
+ length = 0
1402
+ if not chunks:
1403
+ raise ScannerError("while parsing a %s" % name, start_mark,
1404
+ "expected URI, but found %r" % ch.encode('utf-8'),
1405
+ self.get_mark())
1406
+ return u''.join(chunks)
1407
+
1408
+ def scan_uri_escapes(self, name, start_mark):
1409
+ # See the specification for details.
1410
+ bytes = []
1411
+ mark = self.get_mark()
1412
+ while self.peek() == u'%':
1413
+ self.forward()
1414
+ for k in range(2):
1415
+ if self.peek(k) not in u'0123456789ABCDEFabcdef':
1416
+ raise ScannerError("while scanning a %s" % name, start_mark,
1417
+ "expected URI escape sequence of 2 hexdecimal numbers, but found %r" %
1418
+ (self.peek(k).encode('utf-8')), self.get_mark())
1419
+ bytes.append(chr(int(self.prefix(2), 16)))
1420
+ self.forward(2)
1421
+ try:
1422
+ value = unicode(''.join(bytes), 'utf-8')
1423
+ except UnicodeDecodeError, exc:
1424
+ raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark)
1425
+ return value
1426
+
1427
+ def scan_line_break(self):
1428
+ # Transforms:
1429
+ # '\r\n' : '\n'
1430
+ # '\r' : '\n'
1431
+ # '\n' : '\n'
1432
+ # '\x85' : '\n'
1433
+ # '\u2028' : '\u2028'
1434
+ # '\u2029 : '\u2029'
1435
+ # default : ''
1436
+ ch = self.peek()
1437
+ if ch in u'\r\n\x85':
1438
+ if self.prefix(2) == u'\r\n':
1439
+ self.forward(2)
1440
+ else:
1441
+ self.forward()
1442
+ return u'\n'
1443
+ elif ch in u'\u2028\u2029':
1444
+ self.forward()
1445
+ return ch
1446
+ return u''
1447
+
1448
+ #try:
1449
+ # import psyco
1450
+ # psyco.bind(Scanner)
1451
+ #except ImportError:
1452
+ # pass
1453
+