http-parser 1.2.2 → 1.2.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: e23f8fd15e7969bbac10a4e34f3f0fcab492ba503f2f8bdf14a4134fd3c3dd0c
-  data.tar.gz: d20b92acb6465ebaaf9bc8a8843241f6730e4a4ecfde5f198d6c5b389bd5ce9b
+  metadata.gz: c32dcf71a6dae540e33a54338404f6ed9a7c51eef0ad0913162c75152189c041
+  data.tar.gz: 3bd270c5e3362c3c3ec4274e623f028d1fd5f3d5d30c24c87fa7babba9e555ab
 SHA512:
-  metadata.gz: ff9a8fb170d22563613b4743079d6104fd88bbf7d37b94ba71148567291c086a435026c360e59b53c1513d85a4a8c0942e938c5ca9e68728820bea551e18f20c
-  data.tar.gz: 1bb252fcae0be2ab448812f5070b29b67b07839ae19fe091febcd31078fa08712efbce6cfbb76103edd854a8b588960b6d0403c4a591e734c765733284435878
+  metadata.gz: 9aef9b8b55d191a662cb831a51c726ed7eecaab049884523db8fe204326daa7aeb6c0ecf2e026e81eb8138274d20d44aaa6d6cfea8b7611d38e214982f860e38
+  data.tar.gz: 0d9bfde682dafa7e341eda1554cc7e9dc94c89de5dcff5df164a6c0bc5cde47deaa727eca04d706b138eea3e6d4a6351d81498f7c719240fd7244621c4502113
@@ -3,6 +3,4 @@ require 'ffi-compiler/compile_task'
 FFI::Compiler::CompileTask.new('http-parser-ext') do |t|
     t.cflags << "-Wall -Wextra -O3"
     t.cflags << "-D_GNU_SOURCE=1" if RbConfig::CONFIG["host_os"].downcase =~ /mingw/
-    t.cflags << "-arch x86_64" if t.platform.mac?
-    t.ldflags << "-arch x86_64" if t.platform.mac?
 end
@@ -25,8 +25,6 @@
 #include <string.h>
 #include <limits.h>
 
-static uint32_t max_header_size = HTTP_MAX_HEADER_SIZE;
-
 #ifndef ULLONG_MAX
 # define ULLONG_MAX ((uint64_t) -1) /* 2^64-1 */
 #endif
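With the `static uint32_t max_header_size` variable gone, the header-size cap reverts to the compile-time `HTTP_MAX_HEADER_SIZE` macro. A minimal sketch of how an embedder would override it when compiling the vendored sources, assuming the macro keeps its usual guarded default in `http_parser.h` (80 KiB upstream):

```c
/* Assumed shape of the guard in http_parser.h: the default is only used when
 * the build has not defined the macro already, so a definition on the
 * compiler command line changes the limit, e.g.
 *
 *   cc -DHTTP_MAX_HEADER_SIZE=16384 -c http_parser.c
 */
#ifndef HTTP_MAX_HEADER_SIZE
# define HTTP_MAX_HEADER_SIZE (80*1024)  /* upstream default */
#endif
```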
@@ -141,20 +139,20 @@ do { \
 } while (0)
 
 /* Don't allow the total size of the HTTP headers (including the status
- * line) to exceed max_header_size. This check is here to protect
+ * line) to exceed HTTP_MAX_HEADER_SIZE. This check is here to protect
  * embedders against denial-of-service attacks where the attacker feeds
  * us a never-ending header that the embedder keeps buffering.
  *
  * This check is arguably the responsibility of embedders but we're doing
  * it on the embedder's behalf because most won't bother and this way we
- * make the web a little safer. max_header_size is still far bigger
+ * make the web a little safer. HTTP_MAX_HEADER_SIZE is still far bigger
  * than any reasonable request or response so this should never affect
  * day-to-day operation.
  */
 #define COUNT_HEADER_SIZE(V)                                         \
 do {                                                                 \
-  nread += (uint32_t)(V);                                            \
-  if (UNLIKELY(nread > max_header_size)) {                           \
+  nread += (V);                                                      \
+  if (UNLIKELY(nread > (HTTP_MAX_HEADER_SIZE))) {                    \
     SET_ERRNO(HPE_HEADER_OVERFLOW);                                  \
     goto error;                                                      \
   }                                                                  \
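Because COUNT_HEADER_SIZE adds every header byte to `nread`, an oversized header block surfaces to the embedder as `HPE_HEADER_OVERFLOW` from `http_parser_execute()`. A hedged sketch against the library's public API (none of these calls appear in the diff itself; the request text is illustrative and the program assumes it is built against the vendored `http_parser.h`):

```c
/* Sketch: a single header larger than HTTP_MAX_HEADER_SIZE should make
 * http_parser_execute stop early and report HPE_HEADER_OVERFLOW. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "http_parser.h"

int main(void) {
  http_parser parser;
  http_parser_settings settings;
  size_t huge = (size_t)HTTP_MAX_HEADER_SIZE + 1024;
  char *buf = malloc(64 + huge + 8);
  size_t len = 0, parsed;

  /* Request line plus one header whose value exceeds the limit. */
  len += (size_t)sprintf(buf + len, "GET / HTTP/1.1\r\nX-Filler: ");
  memset(buf + len, 'a', huge);
  len += huge;
  len += (size_t)sprintf(buf + len, "\r\n\r\n");

  http_parser_init(&parser, HTTP_REQUEST);
  http_parser_settings_init(&settings);  /* all callbacks left NULL */

  parsed = http_parser_execute(&parser, &settings, buf, len);
  printf("parsed %zu of %zu bytes, errno = %s\n", parsed, len,
         http_errno_name(HTTP_PARSER_ERRNO(&parser)));
  free(buf);
  return 0;
}
```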
@@ -316,8 +314,6 @@ enum state
   , s_req_http_HT
   , s_req_http_HTT
   , s_req_http_HTTP
-  , s_req_http_I
-  , s_req_http_IC
   , s_req_http_major
   , s_req_http_dot
   , s_req_http_minor
@@ -381,10 +377,7 @@ enum header_states
   , h_transfer_encoding
   , h_upgrade
 
-  , h_matching_transfer_encoding_token_start
   , h_matching_transfer_encoding_chunked
-  , h_matching_transfer_encoding_token
-
   , h_matching_connection_token_start
   , h_matching_connection_keep_alive
   , h_matching_connection_close
@@ -653,8 +646,6 @@ size_t http_parser_execute (http_parser *parser,
   const char *status_mark = 0;
   enum state p_state = (enum state) parser->state;
   const unsigned int lenient = parser->lenient_http_headers;
-  const unsigned int allow_chunked_length = parser->allow_chunked_length;
-
   uint32_t nread = parser->nread;
 
   /* We're in an error state. Don't bother doing anything. */
@@ -733,7 +724,6 @@ reexecute:
         if (ch == CR || ch == LF)
           break;
         parser->flags = 0;
-        parser->uses_transfer_encoding = 0;
         parser->content_length = ULLONG_MAX;
 
         if (ch == 'H') {
@@ -771,7 +761,6 @@ reexecute:
         if (ch == CR || ch == LF)
          break;
         parser->flags = 0;
-        parser->uses_transfer_encoding = 0;
         parser->content_length = ULLONG_MAX;
 
         if (ch == 'H') {
@@ -929,7 +918,6 @@ reexecute:
         if (ch == CR || ch == LF)
           break;
         parser->flags = 0;
-        parser->uses_transfer_encoding = 0;
         parser->content_length = ULLONG_MAX;
 
         if (UNLIKELY(!IS_ALPHA(ch))) {
@@ -1096,17 +1084,11 @@ reexecute:
 
       case s_req_http_start:
         switch (ch) {
-          case ' ':
-            break;
           case 'H':
             UPDATE_STATE(s_req_http_H);
             break;
-          case 'I':
-            if (parser->method == HTTP_SOURCE) {
-              UPDATE_STATE(s_req_http_I);
-              break;
-            }
-            /* fall through */
+          case ' ':
+            break;
           default:
             SET_ERRNO(HPE_INVALID_CONSTANT);
             goto error;
@@ -1128,16 +1110,6 @@ reexecute:
         UPDATE_STATE(s_req_http_HTTP);
         break;
 
-      case s_req_http_I:
-        STRICT_CHECK(ch != 'C');
-        UPDATE_STATE(s_req_http_IC);
-        break;
-
-      case s_req_http_IC:
-        STRICT_CHECK(ch != 'E');
-        UPDATE_STATE(s_req_http_HTTP); /* Treat "ICE" as "HTTP". */
-        break;
-
       case s_req_http_HTTP:
         STRICT_CHECK(ch != '/');
         UPDATE_STATE(s_req_http_major);
@@ -1265,9 +1237,9 @@ reexecute:
 
         switch (parser->header_state) {
           case h_general: {
-            size_t left = data + len - p;
-            const char* pe = p + MIN(left, max_header_size);
-            while (p+1 < pe && TOKEN(p[1])) {
+            size_t limit = data + len - p;
+            limit = MIN(limit, HTTP_MAX_HEADER_SIZE);
+            while (p+1 < data + limit && TOKEN(p[1])) {
               p++;
             }
             break;
@@ -1343,7 +1315,6 @@ reexecute:
               parser->header_state = h_general;
             } else if (parser->index == sizeof(TRANSFER_ENCODING)-2) {
               parser->header_state = h_transfer_encoding;
-              parser->uses_transfer_encoding = 1;
             }
             break;
 
@@ -1425,14 +1396,10 @@ reexecute:
             if ('c' == c) {
               parser->header_state = h_matching_transfer_encoding_chunked;
             } else {
-              parser->header_state = h_matching_transfer_encoding_token;
+              parser->header_state = h_general;
             }
             break;
 
-          /* Multi-value `Transfer-Encoding` header */
-          case h_matching_transfer_encoding_token_start:
-            break;
-
           case h_content_length:
             if (UNLIKELY(!IS_NUM(ch))) {
               SET_ERRNO(HPE_INVALID_CONTENT_LENGTH);
@@ -1449,11 +1416,6 @@ reexecute:
             parser->header_state = h_content_length_num;
             break;
 
-          /* when obsolete line folding is encountered for content length
-           * continue to the s_header_value state */
-          case h_content_length_ws:
-            break;
-
           case h_connection:
             /* looking for 'Connection: keep-alive' */
             if (c == 'k') {
@@ -1509,25 +1471,28 @@ reexecute:
 
         switch (h_state) {
           case h_general:
-            {
-              size_t left = data + len - p;
-              const char* pe = p + MIN(left, max_header_size);
-
-              for (; p != pe; p++) {
-                ch = *p;
-                if (ch == CR || ch == LF) {
-                  --p;
-                  break;
-                }
-                if (!lenient && !IS_HEADER_CHAR(ch)) {
-                  SET_ERRNO(HPE_INVALID_HEADER_TOKEN);
-                  goto error;
-                }
-              }
-              if (p == data + len)
-                --p;
-              break;
+          {
+            const char* p_cr;
+            const char* p_lf;
+            size_t limit = data + len - p;
+
+            limit = MIN(limit, HTTP_MAX_HEADER_SIZE);
+
+            p_cr = (const char*) memchr(p, CR, limit);
+            p_lf = (const char*) memchr(p, LF, limit);
+            if (p_cr != NULL) {
+              if (p_lf != NULL && p_cr >= p_lf)
+                p = p_lf;
+              else
+                p = p_cr;
+            } else if (UNLIKELY(p_lf != NULL)) {
+              p = p_lf;
+            } else {
+              p = data + len;
             }
+            --p;
+            break;
+          }
 
           case h_connection:
           case h_transfer_encoding:
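The replacement value scanner looks for the next CR or LF with `memchr` inside a window capped at `HTTP_MAX_HEADER_SIZE`, taking whichever terminator comes first. A standalone sketch of the same idiom (the `find_eol` helper is hypothetical, not part of the library):

```c
/* Standalone sketch: return the earlier of CR/LF within the first `limit`
 * bytes, or NULL if neither occurs in that window. */
#include <stdio.h>
#include <string.h>

static const char *find_eol(const char *p, size_t limit) {
  const char *p_cr = memchr(p, '\r', limit);
  const char *p_lf = memchr(p, '\n', limit);
  if (p_cr != NULL && p_lf != NULL) return p_cr < p_lf ? p_cr : p_lf;
  return p_cr != NULL ? p_cr : p_lf;
}

int main(void) {
  const char value[] = "gzip, chunked\r\nHost: example.com\r\n";
  const char *eol = find_eol(value, sizeof(value) - 1);
  printf("value length before line end: %ld\n", (long)(eol - value));
  return 0;
}
```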
@@ -1576,41 +1541,16 @@ reexecute:
             goto error;
 
           /* Transfer-Encoding: chunked */
-          case h_matching_transfer_encoding_token_start:
-            /* looking for 'Transfer-Encoding: chunked' */
-            if ('c' == c) {
-              h_state = h_matching_transfer_encoding_chunked;
-            } else if (STRICT_TOKEN(c)) {
-              /* TODO(indutny): similar code below does this, but why?
-               * At the very least it seems to be inconsistent given that
-               * h_matching_transfer_encoding_token does not check for
-               * `STRICT_TOKEN`
-               */
-              h_state = h_matching_transfer_encoding_token;
-            } else if (c == ' ' || c == '\t') {
-              /* Skip lws */
-            } else {
-              h_state = h_general;
-            }
-            break;
-
           case h_matching_transfer_encoding_chunked:
             parser->index++;
             if (parser->index > sizeof(CHUNKED)-1
                 || c != CHUNKED[parser->index]) {
-              h_state = h_matching_transfer_encoding_token;
+              h_state = h_general;
             } else if (parser->index == sizeof(CHUNKED)-2) {
              h_state = h_transfer_encoding_chunked;
             }
             break;
 
-          case h_matching_transfer_encoding_token:
-            if (ch == ',') {
-              h_state = h_matching_transfer_encoding_token_start;
-              parser->index = 0;
-            }
-            break;
-
           case h_matching_connection_token_start:
             /* looking for 'Connection: keep-alive' */
             if (c == 'k') {
@@ -1669,7 +1609,7 @@ reexecute:
             break;
 
           case h_transfer_encoding_chunked:
-            if (ch != ' ') h_state = h_matching_transfer_encoding_token;
+            if (ch != ' ') h_state = h_general;
             break;
 
           case h_connection_keep_alive:
@@ -1719,10 +1659,6 @@ reexecute:
       case s_header_value_lws:
       {
         if (ch == ' ' || ch == '\t') {
-          if (parser->header_state == h_content_length_num) {
-            /* treat obsolete line folding as space */
-            parser->header_state = h_content_length_ws;
-          }
           UPDATE_STATE(s_header_value_start);
           REEXECUTE();
         }
@@ -1775,11 +1711,6 @@ reexecute:
           case h_transfer_encoding_chunked:
             parser->flags |= F_CHUNKED;
             break;
-          case h_content_length:
-            /* do not allow empty content length */
-            SET_ERRNO(HPE_INVALID_CONTENT_LENGTH);
-            goto error;
-            break;
           default:
             break;
         }
@@ -1803,22 +1734,12 @@ reexecute:
           REEXECUTE();
         }
 
-        /* Cannot use transfer-encoding and a content-length header together
-           per the HTTP specification. (RFC 7230 Section 3.3.3) */
-        if ((parser->uses_transfer_encoding == 1) &&
+        /* Cannot use chunked encoding and a content-length header together
+           per the HTTP specification. */
+        if ((parser->flags & F_CHUNKED) &&
             (parser->flags & F_CONTENTLENGTH)) {
-          /* Allow it for lenient parsing as long as `Transfer-Encoding` is
-           * not `chunked` or allow_length_with_encoding is set
-           */
-          if (parser->flags & F_CHUNKED) {
-            if (!allow_chunked_length) {
-              SET_ERRNO(HPE_UNEXPECTED_CONTENT_LENGTH);
-              goto error;
-            }
-          } else if (!lenient) {
-            SET_ERRNO(HPE_UNEXPECTED_CONTENT_LENGTH);
-            goto error;
-          }
+          SET_ERRNO(HPE_UNEXPECTED_CONTENT_LENGTH);
+          goto error;
         }
 
         UPDATE_STATE(s_headers_done);
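With the simplified check, any message carrying both `Content-Length` and `Transfer-Encoding: chunked` is rejected as soon as the header block ends. A hedged usage sketch against the public API (the request text is illustrative; it is the classic request-smuggling shape):

```c
/* Sketch: a message that sets both Content-Length and
 * Transfer-Encoding: chunked should fail once headers complete. */
#include <stdio.h>
#include "http_parser.h"

int main(void) {
  static const char req[] =
      "POST /submit HTTP/1.1\r\n"
      "Host: example.com\r\n"
      "Content-Length: 5\r\n"
      "Transfer-Encoding: chunked\r\n"
      "\r\n"
      "0\r\n\r\n";
  http_parser parser;
  http_parser_settings settings;
  size_t parsed;

  http_parser_init(&parser, HTTP_REQUEST);
  http_parser_settings_init(&settings);  /* no callbacks needed */

  parsed = http_parser_execute(&parser, &settings, req, sizeof(req) - 1);
  printf("parsed %zu bytes, errno = %s (%s)\n", parsed,
         http_errno_name(HTTP_PARSER_ERRNO(&parser)),
         http_errno_description(HTTP_PARSER_ERRNO(&parser)));
  return 0;
}
```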
@@ -1893,31 +1814,8 @@ reexecute:
         UPDATE_STATE(NEW_MESSAGE());
         CALLBACK_NOTIFY(message_complete);
       } else if (parser->flags & F_CHUNKED) {
-        /* chunked encoding - ignore Content-Length header,
-         * prepare for a chunk */
+        /* chunked encoding - ignore Content-Length header */
         UPDATE_STATE(s_chunk_size_start);
-      } else if (parser->uses_transfer_encoding == 1) {
-        if (parser->type == HTTP_REQUEST && !lenient) {
-          /* RFC 7230 3.3.3 */
-
-          /* If a Transfer-Encoding header field
-           * is present in a request and the chunked transfer coding is not
-           * the final encoding, the message body length cannot be determined
-           * reliably; the server MUST respond with the 400 (Bad Request)
-           * status code and then close the connection.
-           */
-          SET_ERRNO(HPE_INVALID_TRANSFER_ENCODING);
-          RETURN(p - data); /* Error */
-        } else {
-          /* RFC 7230 3.3.3 */
-
-          /* If a Transfer-Encoding header field is present in a response and
-           * the chunked transfer coding is not the final encoding, the
-           * message body length is determined by reading the connection until
-           * it is closed by the server.
-           */
-          UPDATE_STATE(s_body_identity_eof);
-        }
       } else {
         if (parser->content_length == 0) {
           /* Content-Length header given but zero: Content-Length: 0\r\n */
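Once `F_CHUNKED` is set, the parser enters `s_chunk_size_start` and decodes the body chunk by chunk; from the embedder's side that appears as one `on_body` call per decoded chunk and `on_message_complete` after the terminating zero-length chunk. A hedged sketch (the callbacks are the library's standard settings fields; the response text is illustrative):

```c
/* Sketch: observing chunked framing from the embedder side. */
#include <stdio.h>
#include "http_parser.h"

static int on_body(http_parser *p, const char *at, size_t length) {
  (void)p;
  printf("body chunk: %.*s\n", (int)length, at);
  return 0;
}

static int on_message_complete(http_parser *p) {
  (void)p;
  printf("message complete\n");
  return 0;
}

int main(void) {
  static const char res[] =
      "HTTP/1.1 200 OK\r\n"
      "Transfer-Encoding: chunked\r\n"
      "\r\n"
      "5\r\nhello\r\n"
      "6\r\n world\r\n"
      "0\r\n\r\n";
  http_parser parser;
  http_parser_settings settings;

  http_parser_init(&parser, HTTP_RESPONSE);
  http_parser_settings_init(&settings);
  settings.on_body = on_body;
  settings.on_message_complete = on_message_complete;

  http_parser_execute(&parser, &settings, res, sizeof(res) - 1);
  return 0;
}
```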
@@ -2171,12 +2069,6 @@ http_message_needs_eof (const http_parser *parser)
     return 0;
   }
 
-  /* RFC 7230 3.3.3, see `s_headers_almost_done` */
-  if ((parser->uses_transfer_encoding == 1) &&
-      (parser->flags & F_CHUNKED) == 0) {
-    return 1;
-  }
-
   if ((parser->flags & F_CHUNKED) || parser->content_length != ULLONG_MAX) {
     return 0;
   }
@@ -2355,14 +2247,14 @@ http_parse_host(const char * buf, struct http_parser_url *u, int found_at) {
     switch(new_s) {
       case s_http_host:
         if (s != s_http_host) {
-          u->field_data[UF_HOST].off = (uint16_t)(p - buf);
+          u->field_data[UF_HOST].off = p - buf;
         }
         u->field_data[UF_HOST].len++;
         break;
 
       case s_http_host_v6:
         if (s != s_http_host_v6) {
-          u->field_data[UF_HOST].off = (uint16_t)(p - buf);
+          u->field_data[UF_HOST].off = p - buf;
         }
         u->field_data[UF_HOST].len++;
         break;
@@ -2374,7 +2266,7 @@ http_parse_host(const char * buf, struct http_parser_url *u, int found_at) {
 
       case s_http_host_port:
         if (s != s_http_host_port) {
-          u->field_data[UF_PORT].off = (uint16_t)(p - buf);
+          u->field_data[UF_PORT].off = p - buf;
           u->field_data[UF_PORT].len = 0;
           u->field_set |= (1 << UF_PORT);
         }
@@ -2383,7 +2275,7 @@ http_parse_host(const char * buf, struct http_parser_url *u, int found_at) {
 
       case s_http_userinfo:
         if (s != s_http_userinfo) {
-          u->field_data[UF_USERINFO].off = (uint16_t)(p - buf);
+          u->field_data[UF_USERINFO].off = p - buf ;
           u->field_data[UF_USERINFO].len = 0;
           u->field_set |= (1 << UF_USERINFO);
         }
@@ -2487,7 +2379,7 @@ http_parser_parse_url(const char *buf, size_t buflen, int is_connect,
       continue;
     }
 
-    u->field_data[uf].off = (uint16_t)(p - buf);
+    u->field_data[uf].off = p - buf;
     u->field_data[uf].len = 1;
 
     u->field_set |= (1 << uf);
@@ -2524,7 +2416,7 @@ http_parser_parse_url(const char *buf, size_t buflen, int is_connect,
     end = buf + off + len;
 
     /* NOTE: The characters are already validated and are in the [0-9] range */
-    assert((size_t) (off + len) <= buflen && "Port number overflow");
+    assert(off + len <= buflen && "Port number overflow");
     v = 0;
     for (p = buf + off; p < end; p++) {
       v *= 10;
@@ -2568,8 +2460,3 @@ http_parser_version(void) {
          HTTP_PARSER_VERSION_MINOR * 0x00100 |
          HTTP_PARSER_VERSION_PATCH * 0x00001;
 }
-
-void
-http_parser_set_max_header_size(uint32_t size) {
-  max_header_size = size;
-}
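For reference, `http_parser_version()` packs the three version macros into a single value, major * 0x10000 + minor * 0x100 + patch, so the 2.8.1 sources restored here report 0x020801. A small decoding sketch:

```c
/* Sketch: decoding the packed value returned by http_parser_version(). */
#include <stdio.h>
#include "http_parser.h"

int main(void) {
  unsigned long version = http_parser_version();
  unsigned major = (version >> 16) & 255;
  unsigned minor = (version >> 8) & 255;
  unsigned patch = version & 255;
  printf("http_parser v%u.%u.%u (0x%06lx)\n", major, minor, patch, version);
  return 0;
}
```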
@@ -26,8 +26,8 @@ extern "C" {
 
 /* Also update SONAME in the Makefile whenever you change these. */
 #define HTTP_PARSER_VERSION_MAJOR 2
-#define HTTP_PARSER_VERSION_MINOR 9
-#define HTTP_PARSER_VERSION_PATCH 4
+#define HTTP_PARSER_VERSION_MINOR 8
+#define HTTP_PARSER_VERSION_PATCH 1
 
 #include <stddef.h>
 #if defined(_WIN32) && !defined(__MINGW32__) && \
@@ -41,8 +41,6 @@ typedef __int32 int32_t;
 typedef unsigned __int32 uint32_t;
 typedef __int64 int64_t;
 typedef unsigned __int64 uint64_t;
-#elif (defined(__sun) || defined(__sun__)) && defined(__SunOS_5_9)
-#include <sys/inttypes.h>
 #else
 #include <stdint.h>
 #endif
@@ -277,9 +275,7 @@ enum flags
   XX(INVALID_INTERNAL_STATE, "encountered unexpected internal state")\
   XX(STRICT, "strict mode assertion failed") \
   XX(PAUSED, "parser is paused") \
-  XX(UNKNOWN, "an unknown error occurred") \
-  XX(INVALID_TRANSFER_ENCODING, \
-     "request has invalid transfer-encoding") \
+  XX(UNKNOWN, "an unknown error occurred")
 
 
 /* Define HPE_* values for each errno value above */
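Each XX(...) entry expands into an `HPE_*` constant plus a description string, and `http_errno_name()` / `http_errno_description()` map a code back to those strings at runtime. A hedged sketch (the two helpers are part of the public header, not of this diff):

```c
/* Sketch: the name/description tables generated from the XX(...) list. */
#include <stdio.h>
#include "http_parser.h"

int main(void) {
  printf("%s: %s\n", http_errno_name(HPE_HEADER_OVERFLOW),
         http_errno_description(HPE_HEADER_OVERFLOW));
  printf("%s: %s\n", http_errno_name(HPE_UNEXPECTED_CONTENT_LENGTH),
         http_errno_description(HPE_UNEXPECTED_CONTENT_LENGTH));
  printf("%s: %s\n", http_errno_name(HPE_UNKNOWN),
         http_errno_description(HPE_UNKNOWN));
  return 0;
}
```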
@@ -297,20 +293,14 @@ enum http_errno {
 struct http_parser {
   /** PRIVATE **/
   unsigned int type : 2;         /* enum http_parser_type */
-  unsigned int flags : 8;        /* F_* values from 'flags' enum; semi-public */
+  unsigned int flags : 8;       /* F_* values from 'flags' enum; semi-public */
   unsigned int state : 7;        /* enum state from http_parser.c */
   unsigned int header_state : 7; /* enum header_state from http_parser.c */
-  unsigned int index : 5;        /* index into current matcher */
-  unsigned int uses_transfer_encoding : 1; /* Transfer-Encoding header is present */
-  unsigned int allow_chunked_length : 1; /* Allow headers with both
-                                          * `Content-Length` and
-                                          * `Transfer-Encoding: chunked` set */
+  unsigned int index : 7;        /* index into current matcher */
   unsigned int lenient_http_headers : 1;
 
   uint32_t nread;          /* # bytes read in various scenarios */
-  uint64_t content_length; /* # bytes in body. `(uint64_t) -1` (all bits one)
-                            * if no Content-Length header.
-                            */
+  uint64_t content_length; /* # bytes in body (0 if no Content-Length header) */
 
   /** READ-ONLY **/
   unsigned short http_major;
@@ -440,9 +430,6 @@ void http_parser_pause(http_parser *parser, int paused);
 /* Checks if this is the final chunk of the body. */
 int http_body_is_final(const http_parser *parser);
 
-/* Change the maximum header size provided at compile time. */
-void http_parser_set_max_header_size(uint32_t size);
-
 #ifdef __cplusplus
 }
 #endif
@@ -18,12 +18,12 @@ Gem::Specification.new do |s|
     EOF
 
 
-    s.add_dependency 'ffi-compiler'
+    s.add_dependency 'ffi-compiler', '>= 1.0', '< 2.0'
 
     s.add_development_dependency 'rake', '~> 11.2'
     s.add_development_dependency 'rspec', '~> 3.5'
     s.add_development_dependency 'yard', '~> 0.9'
-
+ 
 
     s.files = Dir["{lib}/**/*"] + %w(Rakefile http-parser.gemspec README.md LICENSE)
     s.files += ["ext/http-parser/http_parser.c", "ext/http-parser/http_parser.h"]
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module HttpParser
-    VERSION = "1.2.2"
+    VERSION = "1.2.3"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: http-parser
 version: !ruby/object:Gem::Version
-  version: 1.2.2
+  version: 1.2.3
 platform: ruby
 authors:
 - Stephen von Takach
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-11-16 00:00:00.000000000 Z
+date: 2021-01-11 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi-compiler
@@ -16,14 +16,20 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: '0'
+        version: '1.0'
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: '2.0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: '0'
+        version: '1.0'
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: '2.0'
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement