http-parser 1.2.1 → 1.2.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: b10418834197ee747e8e8b0f731efef8f1f80285f05426292b627edadc7da52a
- data.tar.gz: 5e159cadc303a5a811754eeac131d56bb5e6f8eff66be4650c049c8fc10a4a2d
+ metadata.gz: e23f8fd15e7969bbac10a4e34f3f0fcab492ba503f2f8bdf14a4134fd3c3dd0c
+ data.tar.gz: d20b92acb6465ebaaf9bc8a8843241f6730e4a4ecfde5f198d6c5b389bd5ce9b
  SHA512:
- metadata.gz: 3fb875b674223383319ab6b02b019acf27964cddec2bb732773d4315f1aac472529b27b1a4edbf8bdbfd5441117c8a274009ad0ace5457082918e42043aa75ef
- data.tar.gz: 3ada5a0f5e72ff81495e6b97c3ea20d16aa7b536fe6a7425477e73360228e3983c92788663ed3fe76e3c1b194230092f1bbed1274e68cf853bdc583c1d053cda
+ metadata.gz: ff9a8fb170d22563613b4743079d6104fd88bbf7d37b94ba71148567291c086a435026c360e59b53c1513d85a4a8c0942e938c5ca9e68728820bea551e18f20c
+ data.tar.gz: 1bb252fcae0be2ab448812f5070b29b67b07839ae19fe091febcd31078fa08712efbce6cfbb76103edd854a8b588960b6d0403c4a591e734c765733284435878
ext/http-parser/http_parser.c CHANGED
@@ -25,6 +25,8 @@
  #include <string.h>
  #include <limits.h>

+ static uint32_t max_header_size = HTTP_MAX_HEADER_SIZE;
+
  #ifndef ULLONG_MAX
  # define ULLONG_MAX ((uint64_t) -1) /* 2^64-1 */
  #endif
@@ -139,20 +141,20 @@ do { \
  } while (0)

  /* Don't allow the total size of the HTTP headers (including the status
- * line) to exceed HTTP_MAX_HEADER_SIZE. This check is here to protect
+ * line) to exceed max_header_size. This check is here to protect
  * embedders against denial-of-service attacks where the attacker feeds
  * us a never-ending header that the embedder keeps buffering.
  *
  * This check is arguably the responsibility of embedders but we're doing
  * it on the embedder's behalf because most won't bother and this way we
- * make the web a little safer. HTTP_MAX_HEADER_SIZE is still far bigger
+ * make the web a little safer. max_header_size is still far bigger
  * than any reasonable request or response so this should never affect
  * day-to-day operation.
  */
  #define COUNT_HEADER_SIZE(V) \
  do { \
- nread += (V); \
- if (UNLIKELY(nread > (HTTP_MAX_HEADER_SIZE))) { \
+ nread += (uint32_t)(V); \
+ if (UNLIKELY(nread > max_header_size)) { \
  SET_ERRNO(HPE_HEADER_OVERFLOW); \
  goto error; \
  } \
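The header-size cap is now a runtime variable instead of a compile-time constant, so an embedder can tighten or relax it without rebuilding the parser. A minimal sketch of how an embedder might use the new setter declared later in http_parser.h (the 8 KB figure is an arbitrary illustration, not a value from this patch):

    #include "http_parser.h"

    int main(void) {
      /* Applies globally to every parser in the process; call it once,
       * before any parser runs. */
      http_parser_set_max_header_size(8 * 1024);

      /* ... initialise parsers with http_parser_init() and feed data with
       * http_parser_execute(); a header block larger than 8 KB now fails
       * with HPE_HEADER_OVERFLOW. */
      return 0;
    }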
@@ -314,6 +316,8 @@ enum state
  , s_req_http_HT
  , s_req_http_HTT
  , s_req_http_HTTP
+ , s_req_http_I
+ , s_req_http_IC
  , s_req_http_major
  , s_req_http_dot
  , s_req_http_minor
@@ -377,7 +381,10 @@ enum header_states
  , h_transfer_encoding
  , h_upgrade

+ , h_matching_transfer_encoding_token_start
  , h_matching_transfer_encoding_chunked
+ , h_matching_transfer_encoding_token
+
  , h_matching_connection_token_start
  , h_matching_connection_keep_alive
  , h_matching_connection_close
@@ -646,6 +653,8 @@ size_t http_parser_execute (http_parser *parser,
  const char *status_mark = 0;
  enum state p_state = (enum state) parser->state;
  const unsigned int lenient = parser->lenient_http_headers;
+ const unsigned int allow_chunked_length = parser->allow_chunked_length;
+
  uint32_t nread = parser->nread;

  /* We're in an error state. Don't bother doing anything. */
@@ -724,6 +733,7 @@ reexecute:
  if (ch == CR || ch == LF)
  break;
  parser->flags = 0;
+ parser->uses_transfer_encoding = 0;
  parser->content_length = ULLONG_MAX;

  if (ch == 'H') {
@@ -761,6 +771,7 @@ reexecute:
  if (ch == CR || ch == LF)
  break;
  parser->flags = 0;
+ parser->uses_transfer_encoding = 0;
  parser->content_length = ULLONG_MAX;

  if (ch == 'H') {
@@ -918,6 +929,7 @@ reexecute:
  if (ch == CR || ch == LF)
  break;
  parser->flags = 0;
+ parser->uses_transfer_encoding = 0;
  parser->content_length = ULLONG_MAX;

  if (UNLIKELY(!IS_ALPHA(ch))) {
@@ -1084,11 +1096,17 @@ reexecute:

  case s_req_http_start:
  switch (ch) {
+ case ' ':
+ break;
  case 'H':
  UPDATE_STATE(s_req_http_H);
  break;
- case ' ':
- break;
+ case 'I':
+ if (parser->method == HTTP_SOURCE) {
+ UPDATE_STATE(s_req_http_I);
+ break;
+ }
+ /* fall through */
  default:
  SET_ERRNO(HPE_INVALID_CONSTANT);
  goto error;
@@ -1110,6 +1128,16 @@ reexecute:
  UPDATE_STATE(s_req_http_HTTP);
  break;

+ case s_req_http_I:
+ STRICT_CHECK(ch != 'C');
+ UPDATE_STATE(s_req_http_IC);
+ break;
+
+ case s_req_http_IC:
+ STRICT_CHECK(ch != 'E');
+ UPDATE_STATE(s_req_http_HTTP); /* Treat "ICE" as "HTTP". */
+ break;
+
  case s_req_http_HTTP:
  STRICT_CHECK(ch != '/');
  UPDATE_STATE(s_req_http_major);
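These two states exist so that an Icecast-style "ICE/1.x" protocol marker on the request line is accepted, but only when the request method is SOURCE. A rough sketch of the kind of request this change admits, assuming callbacks are left unset (illustrative only, not a test case from the patch):

    #include <string.h>
    #include "http_parser.h"

    /* Icecast source clients send "ICE/1.0" where "HTTP/1.x" would appear. */
    static const char req[] =
        "SOURCE /live.mp3 ICE/1.0\r\n"
        "Host: example.com\r\n"
        "\r\n";

    int main(void) {
      http_parser parser;
      http_parser_settings settings;
      memset(&settings, 0, sizeof(settings));

      http_parser_init(&parser, HTTP_REQUEST);
      http_parser_execute(&parser, &settings, req, sizeof(req) - 1);

      /* Expect HPE_OK; previously the 'I' raised HPE_INVALID_CONSTANT. */
      return HTTP_PARSER_ERRNO(&parser) == HPE_OK ? 0 : 1;
    }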
@@ -1237,9 +1265,9 @@ reexecute:

  switch (parser->header_state) {
  case h_general: {
- size_t limit = data + len - p;
- limit = MIN(limit, HTTP_MAX_HEADER_SIZE);
- while (p+1 < data + limit && TOKEN(p[1])) {
+ size_t left = data + len - p;
+ const char* pe = p + MIN(left, max_header_size);
+ while (p+1 < pe && TOKEN(p[1])) {
  p++;
  }
  break;
@@ -1315,6 +1343,7 @@ reexecute:
  parser->header_state = h_general;
  } else if (parser->index == sizeof(TRANSFER_ENCODING)-2) {
  parser->header_state = h_transfer_encoding;
+ parser->uses_transfer_encoding = 1;
  }
  break;

@@ -1396,10 +1425,14 @@ reexecute:
  if ('c' == c) {
  parser->header_state = h_matching_transfer_encoding_chunked;
  } else {
- parser->header_state = h_general;
+ parser->header_state = h_matching_transfer_encoding_token;
  }
  break;

+ /* Multi-value `Transfer-Encoding` header */
+ case h_matching_transfer_encoding_token_start:
+ break;
+
  case h_content_length:
  if (UNLIKELY(!IS_NUM(ch))) {
  SET_ERRNO(HPE_INVALID_CONTENT_LENGTH);
@@ -1416,6 +1449,11 @@ reexecute:
  parser->header_state = h_content_length_num;
  break;

+ /* when obsolete line folding is encountered for content length
+ * continue to the s_header_value state */
+ case h_content_length_ws:
+ break;
+
  case h_connection:
  /* looking for 'Connection: keep-alive' */
  if (c == 'k') {
@@ -1471,28 +1509,25 @@ reexecute:

  switch (h_state) {
  case h_general:
- {
- const char* p_cr;
- const char* p_lf;
- size_t limit = data + len - p;
-
- limit = MIN(limit, HTTP_MAX_HEADER_SIZE);
-
- p_cr = (const char*) memchr(p, CR, limit);
- p_lf = (const char*) memchr(p, LF, limit);
- if (p_cr != NULL) {
- if (p_lf != NULL && p_cr >= p_lf)
- p = p_lf;
- else
- p = p_cr;
- } else if (UNLIKELY(p_lf != NULL)) {
- p = p_lf;
- } else {
- p = data + len;
+ {
+ size_t left = data + len - p;
+ const char* pe = p + MIN(left, max_header_size);
+
+ for (; p != pe; p++) {
+ ch = *p;
+ if (ch == CR || ch == LF) {
+ --p;
+ break;
+ }
+ if (!lenient && !IS_HEADER_CHAR(ch)) {
+ SET_ERRNO(HPE_INVALID_HEADER_TOKEN);
+ goto error;
+ }
+ }
+ if (p == data + len)
+ --p;
+ break;
  }
- --p;
- break;
- }

  case h_connection:
  case h_transfer_encoding:
@@ -1541,16 +1576,41 @@ reexecute:
  goto error;

  /* Transfer-Encoding: chunked */
+ case h_matching_transfer_encoding_token_start:
+ /* looking for 'Transfer-Encoding: chunked' */
+ if ('c' == c) {
+ h_state = h_matching_transfer_encoding_chunked;
+ } else if (STRICT_TOKEN(c)) {
+ /* TODO(indutny): similar code below does this, but why?
+ * At the very least it seems to be inconsistent given that
+ * h_matching_transfer_encoding_token does not check for
+ * `STRICT_TOKEN`
+ */
+ h_state = h_matching_transfer_encoding_token;
+ } else if (c == ' ' || c == '\t') {
+ /* Skip lws */
+ } else {
+ h_state = h_general;
+ }
+ break;
+
  case h_matching_transfer_encoding_chunked:
  parser->index++;
  if (parser->index > sizeof(CHUNKED)-1
  || c != CHUNKED[parser->index]) {
- h_state = h_general;
+ h_state = h_matching_transfer_encoding_token;
  } else if (parser->index == sizeof(CHUNKED)-2) {
  h_state = h_transfer_encoding_chunked;
  }
  break;

+ case h_matching_transfer_encoding_token:
+ if (ch == ',') {
+ h_state = h_matching_transfer_encoding_token_start;
+ parser->index = 0;
+ }
+ break;
+
  case h_matching_connection_token_start:
  /* looking for 'Connection: keep-alive' */
  if (c == 'k') {
@@ -1609,7 +1669,7 @@ reexecute:
  break;

  case h_transfer_encoding_chunked:
- if (ch != ' ') h_state = h_general;
+ if (ch != ' ') h_state = h_matching_transfer_encoding_token;
  break;

  case h_connection_keep_alive:
@@ -1659,6 +1719,10 @@ reexecute:
  case s_header_value_lws:
  {
  if (ch == ' ' || ch == '\t') {
+ if (parser->header_state == h_content_length_num) {
+ /* treat obsolete line folding as space */
+ parser->header_state = h_content_length_ws;
+ }
  UPDATE_STATE(s_header_value_start);
  REEXECUTE();
  }
@@ -1711,6 +1775,11 @@ reexecute:
  case h_transfer_encoding_chunked:
  parser->flags |= F_CHUNKED;
  break;
+ case h_content_length:
+ /* do not allow empty content length */
+ SET_ERRNO(HPE_INVALID_CONTENT_LENGTH);
+ goto error;
+ break;
  default:
  break;
  }
@@ -1734,12 +1803,22 @@ reexecute:
  REEXECUTE();
  }

- /* Cannot use chunked encoding and a content-length header together
- per the HTTP specification. */
- if ((parser->flags & F_CHUNKED) &&
+ /* Cannot use transfer-encoding and a content-length header together
+ per the HTTP specification. (RFC 7230 Section 3.3.3) */
+ if ((parser->uses_transfer_encoding == 1) &&
  (parser->flags & F_CONTENTLENGTH)) {
- SET_ERRNO(HPE_UNEXPECTED_CONTENT_LENGTH);
- goto error;
+ /* Allow it for lenient parsing as long as `Transfer-Encoding` is
+ * not `chunked` or allow_length_with_encoding is set
+ */
+ if (parser->flags & F_CHUNKED) {
+ if (!allow_chunked_length) {
+ SET_ERRNO(HPE_UNEXPECTED_CONTENT_LENGTH);
+ goto error;
+ }
+ } else if (!lenient) {
+ SET_ERRNO(HPE_UNEXPECTED_CONTENT_LENGTH);
+ goto error;
+ }
  }

  UPDATE_STATE(s_headers_done);
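This conflict check is the request-smuggling hardening in this release; the new allow_chunked_length bit and the existing lenient_http_headers bit are the opt-outs. A hedged sketch of how an embedder that must tolerate legacy peers might opt back in, assuming the bit-fields added to struct http_parser (see the http_parser.h hunks below) are set directly after init:

    #include "http_parser.h"

    /* Relax the new Content-Length / Transfer-Encoding conflict checks.
     * Only do this for trusted or legacy traffic. */
    void configure_legacy_parser(http_parser *parser) {
      http_parser_init(parser, HTTP_REQUEST);

      /* Accept "Transfer-Encoding: chunked" together with "Content-Length"
       * instead of failing with HPE_UNEXPECTED_CONTENT_LENGTH. */
      parser->allow_chunked_length = 1;

      /* Accept "Content-Length" alongside a non-chunked Transfer-Encoding. */
      parser->lenient_http_headers = 1;
    }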
@@ -1814,8 +1893,31 @@ reexecute:
  UPDATE_STATE(NEW_MESSAGE());
  CALLBACK_NOTIFY(message_complete);
  } else if (parser->flags & F_CHUNKED) {
- /* chunked encoding - ignore Content-Length header */
+ /* chunked encoding - ignore Content-Length header,
+ * prepare for a chunk */
  UPDATE_STATE(s_chunk_size_start);
+ } else if (parser->uses_transfer_encoding == 1) {
+ if (parser->type == HTTP_REQUEST && !lenient) {
+ /* RFC 7230 3.3.3 */
+
+ /* If a Transfer-Encoding header field
+ * is present in a request and the chunked transfer coding is not
+ * the final encoding, the message body length cannot be determined
+ * reliably; the server MUST respond with the 400 (Bad Request)
+ * status code and then close the connection.
+ */
+ SET_ERRNO(HPE_INVALID_TRANSFER_ENCODING);
+ RETURN(p - data); /* Error */
+ } else {
+ /* RFC 7230 3.3.3 */
+
+ /* If a Transfer-Encoding header field is present in a response and
+ * the chunked transfer coding is not the final encoding, the
+ * message body length is determined by reading the connection until
+ * it is closed by the server.
+ */
+ UPDATE_STATE(s_body_identity_eof);
+ }
  } else {
  if (parser->content_length == 0) {
  /* Content-Length header given but zero: Content-Length: 0\r\n */
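For requests, a Transfer-Encoding whose final coding is not "chunked" is now a hard parse error rather than a read-until-EOF body. A small sketch of a request that now stops at the end of the header block, assuming callbacks are left unset (illustrative, not a test case from the patch):

    #include <string.h>
    #include "http_parser.h"

    /* "gzip" is a real transfer coding, but because the final coding is not
     * "chunked" the request body length cannot be determined (RFC 7230 3.3.3). */
    static const char req[] =
        "POST /upload HTTP/1.1\r\n"
        "Host: example.com\r\n"
        "Transfer-Encoding: gzip\r\n"
        "\r\n";

    int main(void) {
      http_parser parser;
      http_parser_settings settings;
      memset(&settings, 0, sizeof(settings));

      http_parser_init(&parser, HTTP_REQUEST);
      size_t parsed = http_parser_execute(&parser, &settings, req, sizeof(req) - 1);

      /* The parser bails out early and reports the new error code. */
      return (parsed < sizeof(req) - 1 &&
              HTTP_PARSER_ERRNO(&parser) == HPE_INVALID_TRANSFER_ENCODING) ? 0 : 1;
    }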
@@ -2069,6 +2171,12 @@ http_message_needs_eof (const http_parser *parser)
  return 0;
  }

+ /* RFC 7230 3.3.3, see `s_headers_almost_done` */
+ if ((parser->uses_transfer_encoding == 1) &&
+ (parser->flags & F_CHUNKED) == 0) {
+ return 1;
+ }
+
  if ((parser->flags & F_CHUNKED) || parser->content_length != ULLONG_MAX) {
  return 0;
  }
@@ -2247,14 +2355,14 @@ http_parse_host(const char * buf, struct http_parser_url *u, int found_at) {
  switch(new_s) {
  case s_http_host:
  if (s != s_http_host) {
- u->field_data[UF_HOST].off = p - buf;
+ u->field_data[UF_HOST].off = (uint16_t)(p - buf);
  }
  u->field_data[UF_HOST].len++;
  break;

  case s_http_host_v6:
  if (s != s_http_host_v6) {
- u->field_data[UF_HOST].off = p - buf;
+ u->field_data[UF_HOST].off = (uint16_t)(p - buf);
  }
  u->field_data[UF_HOST].len++;
  break;
@@ -2266,7 +2374,7 @@ http_parse_host(const char * buf, struct http_parser_url *u, int found_at) {

  case s_http_host_port:
  if (s != s_http_host_port) {
- u->field_data[UF_PORT].off = p - buf;
+ u->field_data[UF_PORT].off = (uint16_t)(p - buf);
  u->field_data[UF_PORT].len = 0;
  u->field_set |= (1 << UF_PORT);
  }
@@ -2275,7 +2383,7 @@ http_parse_host(const char * buf, struct http_parser_url *u, int found_at) {

  case s_http_userinfo:
  if (s != s_http_userinfo) {
- u->field_data[UF_USERINFO].off = p - buf ;
+ u->field_data[UF_USERINFO].off = (uint16_t)(p - buf);
  u->field_data[UF_USERINFO].len = 0;
  u->field_set |= (1 << UF_USERINFO);
  }
@@ -2379,7 +2487,7 @@ http_parser_parse_url(const char *buf, size_t buflen, int is_connect,
  continue;
  }

- u->field_data[uf].off = p - buf;
+ u->field_data[uf].off = (uint16_t)(p - buf);
  u->field_data[uf].len = 1;

  u->field_set |= (1 << uf);
@@ -2416,7 +2524,7 @@ http_parser_parse_url(const char *buf, size_t buflen, int is_connect,
  end = buf + off + len;

  /* NOTE: The characters are already validated and are in the [0-9] range */
- assert(off + len <= buflen && "Port number overflow");
+ assert((size_t) (off + len) <= buflen && "Port number overflow");
  v = 0;
  for (p = buf + off; p < end; p++) {
  v *= 10;
@@ -2460,3 +2568,8 @@ http_parser_version(void) {
  HTTP_PARSER_VERSION_MINOR * 0x00100 |
  HTTP_PARSER_VERSION_PATCH * 0x00001;
  }
+
+ void
+ http_parser_set_max_header_size(uint32_t size) {
+ max_header_size = size;
+ }
ext/http-parser/http_parser.h CHANGED
@@ -26,8 +26,8 @@ extern "C" {

  /* Also update SONAME in the Makefile whenever you change these. */
  #define HTTP_PARSER_VERSION_MAJOR 2
- #define HTTP_PARSER_VERSION_MINOR 8
- #define HTTP_PARSER_VERSION_PATCH 1
+ #define HTTP_PARSER_VERSION_MINOR 9
+ #define HTTP_PARSER_VERSION_PATCH 4

  #include <stddef.h>
  #if defined(_WIN32) && !defined(__MINGW32__) && \
@@ -41,6 +41,8 @@ typedef __int32 int32_t;
  typedef unsigned __int32 uint32_t;
  typedef __int64 int64_t;
  typedef unsigned __int64 uint64_t;
+ #elif (defined(__sun) || defined(__sun__)) && defined(__SunOS_5_9)
+ #include <sys/inttypes.h>
  #else
  #include <stdint.h>
  #endif
@@ -275,7 +277,9 @@ enum flags
  XX(INVALID_INTERNAL_STATE, "encountered unexpected internal state")\
  XX(STRICT, "strict mode assertion failed") \
  XX(PAUSED, "parser is paused") \
- XX(UNKNOWN, "an unknown error occurred")
+ XX(UNKNOWN, "an unknown error occurred") \
+ XX(INVALID_TRANSFER_ENCODING, \
+ "request has invalid transfer-encoding") \


  /* Define HPE_* values for each errno value above */
@@ -293,14 +297,20 @@ enum http_errno {
  struct http_parser {
  /** PRIVATE **/
  unsigned int type : 2; /* enum http_parser_type */
- unsigned int flags : 8; /* F_* values from 'flags' enum; semi-public */
+ unsigned int flags : 8; /* F_* values from 'flags' enum; semi-public */
  unsigned int state : 7; /* enum state from http_parser.c */
  unsigned int header_state : 7; /* enum header_state from http_parser.c */
- unsigned int index : 7; /* index into current matcher */
+ unsigned int index : 5; /* index into current matcher */
+ unsigned int uses_transfer_encoding : 1; /* Transfer-Encoding header is present */
+ unsigned int allow_chunked_length : 1; /* Allow headers with both
+ * `Content-Length` and
+ * `Transfer-Encoding: chunked` set */
  unsigned int lenient_http_headers : 1;

  uint32_t nread; /* # bytes read in various scenarios */
- uint64_t content_length; /* # bytes in body (0 if no Content-Length header) */
+ uint64_t content_length; /* # bytes in body. `(uint64_t) -1` (all bits one)
+ * if no Content-Length header.
+ */

  /** READ-ONLY **/
  unsigned short http_major;
@@ -430,6 +440,9 @@ void http_parser_pause(http_parser *parser, int paused);
  /* Checks if this is the final chunk of the body. */
  int http_body_is_final(const http_parser *parser);

+ /* Change the maximum header size provided at compile time. */
+ void http_parser_set_max_header_size(uint32_t size);
+
  #ifdef __cplusplus
  }
  #endif
http-parser.gemspec CHANGED
@@ -18,12 +18,12 @@ Gem::Specification.new do |s|
  EOF


- s.add_dependency 'ffi-compiler', '>= 1.0', '< 2.0'
+ s.add_dependency 'ffi-compiler'

  s.add_development_dependency 'rake', '~> 11.2'
  s.add_development_dependency 'rspec', '~> 3.5'
  s.add_development_dependency 'yard', '~> 0.9'
-
+

  s.files = Dir["{lib}/**/*"] + %w(Rakefile http-parser.gemspec README.md LICENSE)
  s.files += ["ext/http-parser/http_parser.c", "ext/http-parser/http_parser.h"]
@@ -10,8 +10,8 @@ module HttpParser
  #
  # Returns a new request/response instance variable
  #
- def self.new_instance &block
- ::HttpParser::Instance.new &block
+ def self.new_instance(&block)
+ ::HttpParser::Instance.new(&block)
  end


@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module HttpParser
- VERSION = "1.2.1"
+ VERSION = "1.2.2"
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: http-parser
  version: !ruby/object:Gem::Version
- version: 1.2.1
+ version: 1.2.2
  platform: ruby
  authors:
  - Stephen von Takach
- autorequire:
+ autorequire:
  bindir: bin
  cert_chain: []
- date: 2018-10-11 00:00:00.000000000 Z
+ date: 2020-11-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: ffi-compiler
@@ -16,20 +16,14 @@ dependencies:
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: '1.0'
- - - "<"
- - !ruby/object:Gem::Version
- version: '2.0'
+ version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: '1.0'
- - - "<"
- - !ruby/object:Gem::Version
- version: '2.0'
+ version: '0'
  - !ruby/object:Gem::Dependency
  name: rake
  requirement: !ruby/object:Gem::Requirement
@@ -103,7 +97,7 @@ homepage: https://github.com/cotag/http-parser
  licenses:
  - MIT
  metadata: {}
- post_install_message:
+ post_install_message:
  rdoc_options: []
  require_paths:
  - lib
@@ -118,9 +112,9 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubyforge_project:
+ rubyforge_project:
  rubygems_version: 2.7.7
- signing_key:
+ signing_key:
  specification_version: 4
  summary: Ruby bindings to joyent/http-parser
  test_files: