e621_export_downloader 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.irbrc +14 -0
- data/.ruby-version +1 -0
- data/LICENSE +21 -0
- data/README.md +148 -0
- data/Rakefile +8 -0
- data/exe/e621-export-downloader +112 -0
- data/lib/e621_export_downloader/client/options/builder/parsers.rb +42 -0
- data/lib/e621_export_downloader/client/options/builder.rb +44 -0
- data/lib/e621_export_downloader/client/options.rb +37 -0
- data/lib/e621_export_downloader/client.rb +120 -0
- data/lib/e621_export_downloader/constants.rb +17 -0
- data/lib/e621_export_downloader/export.rb +128 -0
- data/lib/e621_export_downloader/export_helper.rb +83 -0
- data/lib/e621_export_downloader/models/pool.rb +69 -0
- data/lib/e621_export_downloader/models/post.rb +166 -0
- data/lib/e621_export_downloader/models/tag.rb +41 -0
- data/lib/e621_export_downloader/models/tag_alias.rb +46 -0
- data/lib/e621_export_downloader/models/tag_implication.rb +46 -0
- data/lib/e621_export_downloader/models/wiki_page.rb +61 -0
- data/lib/e621_export_downloader/types.rb +14 -0
- data/lib/e621_export_downloader/version.rb +10 -0
- data/lib/e621_export_downloader.rb +12 -0
- data/sorbet/config +5 -0
- data/sorbet/rbi/annotations/.gitattributes +1 -0
- data/sorbet/rbi/annotations/faraday.rbi +17 -0
- data/sorbet/rbi/annotations/rainbow.rbi +269 -0
- data/sorbet/rbi/gems/.gitattributes +1 -0
- data/sorbet/rbi/gems/ast@2.4.3.rbi +550 -0
- data/sorbet/rbi/gems/benchmark@0.5.0.rbi +621 -0
- data/sorbet/rbi/gems/csv@3.3.5.rbi +4462 -0
- data/sorbet/rbi/gems/date@3.5.1.rbi +391 -0
- data/sorbet/rbi/gems/erb@6.0.4.rbi +1538 -0
- data/sorbet/rbi/gems/erubi@1.13.1.rbi +155 -0
- data/sorbet/rbi/gems/faraday-net_http@3.4.2.rbi +9 -0
- data/sorbet/rbi/gems/faraday@2.14.1.rbi +9 -0
- data/sorbet/rbi/gems/io-console@0.8.2.rbi +9 -0
- data/sorbet/rbi/gems/json@2.19.5.rbi +2240 -0
- data/sorbet/rbi/gems/language_server-protocol@3.17.0.5.rbi +9 -0
- data/sorbet/rbi/gems/lint_roller@1.1.0.rbi +189 -0
- data/sorbet/rbi/gems/logger@1.7.0.rbi +896 -0
- data/sorbet/rbi/gems/net-http@0.9.1.rbi +4029 -0
- data/sorbet/rbi/gems/netrc@0.11.0.rbi +147 -0
- data/sorbet/rbi/gems/parallel@2.1.0.rbi +321 -0
- data/sorbet/rbi/gems/parser@3.3.11.1.rbi +5229 -0
- data/sorbet/rbi/gems/pp@0.6.3.rbi +377 -0
- data/sorbet/rbi/gems/prettyprint@0.2.0.rbi +455 -0
- data/sorbet/rbi/gems/prism@1.9.0.rbi +42224 -0
- data/sorbet/rbi/gems/psych@5.3.1.rbi +2374 -0
- data/sorbet/rbi/gems/racc@1.8.1.rbi +165 -0
- data/sorbet/rbi/gems/rainbow@3.1.1.rbi +362 -0
- data/sorbet/rbi/gems/rake@13.4.2.rbi +3130 -0
- data/sorbet/rbi/gems/rbi@0.3.11.rbi +5505 -0
- data/sorbet/rbi/gems/rbs@4.0.2.rbi +6908 -0
- data/sorbet/rbi/gems/rdoc@7.2.0.rbi +9 -0
- data/sorbet/rbi/gems/regexp_parser@2.12.0.rbi +3398 -0
- data/sorbet/rbi/gems/reline@0.6.3.rbi +2446 -0
- data/sorbet/rbi/gems/require-hooks@0.4.0.rbi +152 -0
- data/sorbet/rbi/gems/rexml@3.4.4.rbi +4905 -0
- data/sorbet/rbi/gems/rubocop-ast@1.49.1.rbi +7062 -0
- data/sorbet/rbi/gems/rubocop-rake@0.7.1.rbi +314 -0
- data/sorbet/rbi/gems/rubocop@1.86.1.rbi +62227 -0
- data/sorbet/rbi/gems/ruby-progressbar@1.13.0.rbi +988 -0
- data/sorbet/rbi/gems/rubydex@0.2.0.rbi +663 -0
- data/sorbet/rbi/gems/spoom@1.7.13.rbi +6151 -0
- data/sorbet/rbi/gems/stringio@3.2.0.rbi +9 -0
- data/sorbet/rbi/gems/tapioca@0.19.1.rbi +3555 -0
- data/sorbet/rbi/gems/thor@1.5.0.rbi +3870 -0
- data/sorbet/rbi/gems/tsort@0.2.0.rbi +389 -0
- data/sorbet/rbi/gems/unicode-display_width@3.2.0.rbi +130 -0
- data/sorbet/rbi/gems/unicode-emoji@4.2.0.rbi +332 -0
- data/sorbet/rbi/gems/uri@1.1.1.rbi +2400 -0
- data/sorbet/rbi/gems/zeitwerk@2.7.5.rbi +1090 -0
- data/sorbet/rbi/shims/faraday.rbi +42 -0
- data/sorbet/rbi/todo.rbi +7 -0
- data/sorbet/tapioca/config.yml +13 -0
- data/sorbet/tapioca/require.rb +4 -0
- metadata +177 -0
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
# frozen_string_literal: true
# typed: strict

require("csv")
require("fileutils")
require("zlib")

module E621ExportDownloader
  # Handle for a single dated database export: a gzipped CSV hosted remotely.
  # Supports downloading it into Constants::TEMP_DIR (decompressing as it
  # streams), reading its rows, and deleting the local cache.
  class Export
    extend(T::Sig)
    extend(T::Generic)

    Model = type_member

    # The date of the export this handle points at.
    sig { returns(Date) }
    attr_accessor(:date)

    # The helper that owns this export (provides client, type, and parser).
    sig { returns(ExportHelper[Model]) }
    attr_accessor(:helper)

    sig { returns(T.nilable(T::Boolean)) }
    # If nil, no check has been performed yet
    attr_accessor(:downloaded)

    sig { params(date: T.any(Date, DateTime), helper: ExportHelper[Model]).void }
    def initialize(date:, helper:)
      @date = T.let(date.is_a?(DateTime) ? date.to_date : date, Date)
      @helper = helper
      @downloaded = T.let(nil, T.nilable(T::Boolean))
    end

    # Removes the locally cached CSV. Returns false when nothing was cached.
    sig { returns(T::Boolean) }
    def delete
      return false unless check_downloaded

      FileUtils.rm(file_path)
      @downloaded = false
      true
    end

    # Downloads and decompresses the export, returning the local file path.
    # Reuses the cached copy when one is already on disk. Raises ResolveError
    # when the remote file is missing or the request fails; any partially
    # written file is removed before the error propagates.
    sig { returns(String) }
    def download
      raise(ResolveError, "Export #{helper.type} for #{helper.format_date(date)} does not exist") unless exists?

      if check_downloaded
        helper.client.debug("using cached export for #{helper.type.serialize}", header: %W[export #{helper.format_date(date)}])
        return file_path
      end

      helper.client.debug("downloading export for #{helper.type.serialize}", header: %W[export #{helper.format_date(date)}])

      FileUtils.mkdir_p(Constants::TEMP_DIR)
      File.open(file_path, "wb") do |file|
        # MAX_WBITS + 16 tells zlib to expect a gzip-wrapped stream.
        inflater = Zlib::Inflate.new(Zlib::MAX_WBITS + 16)

        res = helper.client.connection.get("#{file_name}.gz") do |req|
          # Decompress each chunk as it arrives instead of buffering the
          # whole archive in memory.
          req.options.on_data = proc do |chunk, _total|
            decompressed = inflater.inflate(chunk)
            file.write(decompressed) if decompressed && !decompressed.empty?
          end
        end

        # Flush whatever zlib is still holding, then close explicitly so the
        # file is fully written before we inspect the response status.
        file.write(inflater.finish)
        inflater.close
        file.close

        unless res.success?
          raise(ResolveError, "Failed to download export #{helper.type.serialize} for #{helper.format_date(date)}: #{res.status} #{res.reason_phrase}")
        end
      rescue # rubocop:disable Style/RescueStandardError
        # Never leave a truncated file behind for check_downloaded to mistake
        # for a complete export.
        FileUtils.rm_f(file_path)
        raise
      end

      @downloaded = true
      file_path
    end

    # Whether the export exists on the remote server (HEAD request).
    sig { returns(T::Boolean) }
    def exists?
      helper.client.connection.head("#{file_name}.gz").success?
    end

    # Yields each parsed record (and, for two-argument blocks, the total
    # record count) to the block, downloading the export first if necessary.
    sig { params(block: T.proc.params(arg0: Model, arg1: Integer).void).void }
    def read(&block)
      download unless check_downloaded
      helper.client.debug("reading export for #{helper.type.serialize}", header: %W[export #{helper.format_date(date)}])
      total = line_count
      CSV.foreach(file_path, headers: true) do |row|
        args = [helper.parser.call(T.cast(row, CSV::Row).to_hash), total]
        # Trim args to the block's arity so one-argument blocks still work.
        args = args.slice(0, block.arity) if block.arity != -1
        block.call(*T.unsafe(args))
      end
    end

    # Reads the entire export into memory and returns every parsed record.
    sig { returns(T::Array[Model]) }
    def read_all
      download unless check_downloaded
      helper.client.debug("reading all records for #{helper.type.serialize}", header: %W[export #{helper.format_date(date)}])
      results = []
      read do |record|
        results << record
      end
      results
    end

    private

    # Number of data records in the CSV. Parsing with headers: true keeps the
    # header row out of the count so the total matches the number of records
    # #read yields (the previous raw line count included the header and was
    # off by one).
    sig { returns(Integer) }
    def line_count
      total = 0
      CSV.foreach(file_path, headers: true) { total += 1 }
      total
    end

    # Memoized check for a cached file on disk; nil means "not checked yet".
    sig { returns(T::Boolean) }
    def check_downloaded
      return @downloaded unless @downloaded.nil?

      @downloaded = File.exist?(file_path)
    end

    sig { returns(String) }
    def file_path
      File.join(Constants::TEMP_DIR, file_name)
    end

    # e.g. "posts-2024-01-01.csv"; the remote copy carries a ".gz" suffix.
    sig { returns(String) }
    def file_name
      "#{helper.type.serialize}-#{helper.format_date(date)}.csv"
    end
  end
end
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
# frozen_string_literal: true
# typed: strict

module E621ExportDownloader
  # Per-export-type convenience wrapper around Export: resolves dated
  # exports, optionally rewinding to earlier dates when the requested one
  # has not been published yet.
  class ExportHelper
    extend(T::Sig)
    extend(T::Generic)

    Model = type_member

    # Which export this helper serves (posts, pools, tags, ...).
    sig { returns(Types) }
    attr_reader(:type)

    # Converts a raw CSV row hash into a Model instance.
    sig { returns(Client::Options::Parser) }
    attr_reader(:parser)

    sig { returns(Client) }
    attr_reader(:client)

    sig { params(type: Types, parser: Client::Options::Parser, client: Client).void }
    def initialize(type:, parser:, client:)
      @type = type
      @parser = parser
      @client = client
      @rewind_count = T.let(0, Integer)
    end

    sig { params(date: T.any(Date, DateTime)).returns(String) }
    def format_date(date)
      date.strftime("%Y-%m-%d")
    end

    # Deletes the locally cached export for the resolved date.
    sig { params(date: T.any(Date, DateTime)).returns(T::Boolean) }
    def delete(date)
      client.debug("deleting export for #{type.serialize}", header: %W[helper #{format_date(date)}])
      _get(date).delete
    end

    # Downloads the export for the resolved date; returns the local path.
    sig { params(date: T.any(Date, DateTime)).returns(String) }
    def download(date)
      client.debug("downloading export for #{type.serialize}", header: %W[helper #{format_date(date)}])
      _get(date).download
    end

    sig { params(date: T.any(Date, DateTime)).returns(T::Boolean) }
    def exists?(date)
      client.debug("checking export existence for #{type.serialize}", header: %W[helper #{format_date(date)}])
      _get(date).exists?
    end

    # Builds an Export handle without checking remote existence.
    sig { params(date: T.any(Date, DateTime)).returns(Export[Model]) }
    def get(date)
      client.debug("creating export handle for #{type.serialize}", header: %W[helper #{format_date(date)}])
      Export.new(date: date, helper: self)
    end

    private

    # Maximum number of earlier dates to try when an export is missing.
    # false disables rewinding, true allows 2 attempts, an Integer sets the
    # limit explicitly.
    sig { returns(Integer) }
    def max_rewind_count
      return 0 if client.options.rewind_on_not_found == false
      return 2 if client.options.rewind_on_not_found == true

      T.cast(client.options.rewind_on_not_found, Integer)
    end

    sig { params(date: T.any(Date, DateTime), original_date: T.any(Date, DateTime)).returns(Export[Model]) }
    def _get(date, original_date = date)
      # A fresh lookup starts with date == original_date (same object, via
      # the default argument). Reset the counter so previous lookups don't
      # eat into this one's rewind budget.
      @rewind_count = 0 if date.equal?(original_date)

      export = get(date)
      if export.exists?
        client.debug("resolved export for #{type.serialize}", header: %w[helper])
        return export
      end

      if @rewind_count < max_rewind_count
        @rewind_count += 1
        client.debug("rewinding export lookup for #{type.serialize} from #{format_date(original_date)} (attempt #{@rewind_count}/#{max_rewind_count})", header: %w[helper])
        # Rewind to the PREVIOUS day's export: when the requested date's dump
        # has not been published yet, an older one may exist. (The original
        # used `+ 1`, walking forward into dates that cannot exist yet.)
        return _get(date.to_date - 1, original_date)
      end

      raise(ResolveError, "Export #{type.serialize} for #{format_date(original_date)} does not exist, and either rewinding is not allowed or the maximum rewind limit has been reached")
    end
  end
end
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
# frozen_string_literal: true
# typed: strong

module E621ExportDownloader
  module Models
    # One pool record, parsed from a single row of the pools CSV export.
    class Pool
      extend(T::Sig)

      # The raw CSV row this pool was built from.
      sig { returns(T::Hash[String, String]) }
      attr_reader(:record)

      sig { returns(String) }
      attr_reader(:category)

      sig { returns(DateTime) }
      attr_reader(:created_at)

      sig { returns(Integer) }
      attr_reader(:creator_id)

      sig { returns(String) }
      attr_reader(:description)

      sig { returns(Integer) }
      attr_reader(:id)

      sig { returns(T::Boolean) }
      attr_reader(:is_active)

      sig { returns(String) }
      attr_reader(:name)

      sig { returns(T::Array[Integer]) }
      attr_reader(:post_ids)

      sig { returns(T.nilable(DateTime)) }
      attr_reader(:updated_at)

      sig { params(record: T::Hash[String, String]).void }
      def initialize(record)
        @record = T.let(record, T::Hash[String, String])
        @category = T.let(T.must(record["category"]), String)
        @created_at = T.let(DateTime.parse(record["created_at"]), DateTime)
        @creator_id = T.let(record["creator_id"].to_i, Integer)
        @description = T.let(T.must(record["description"]), String)
        @id = T.let(record["id"].to_i, Integer)
        # Booleans are serialized as "t"/"f" in the export.
        @is_active = T.let(record["is_active"] == "t", T::Boolean)
        @name = T.let(T.must(record["name"]), String)
        # post_ids arrives as a Postgres array literal, e.g. "{1,2,3}";
        # strip the surrounding braces before splitting.
        ids_raw = T.must(T.must(record["post_ids"])[1..-2])
        @post_ids = T.let(ids_raw.split(",").map(&:to_i), T::Array[Integer])
        # updated_at may be blank for pools never edited after creation.
        @updated_at = T.let(T.must(record["updated_at"]).empty? ? nil : DateTime.parse(record["updated_at"]), T.nilable(DateTime))
      end

      # JSON serialization of the parsed fields (raw record excluded).
      sig { params(_args: T.untyped).returns(String) }
      def to_json(*_args)
        {
          category: category,
          created_at: created_at,
          creator_id: creator_id,
          description: description,
          id: id,
          is_active: is_active,
          name: name,
          post_ids: post_ids,
          updated_at: updated_at,
        }.to_json
      end
    end
  end
end
|
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
# frozen_string_literal: true
# typed: strict

module E621ExportDownloader
  module Models
    # One post record, parsed from a single row of the posts CSV export.
    # Empty string fields are mapped to nil for the nilable attributes;
    # booleans are serialized as "t"/"f".
    class Post
      extend(T::Sig)

      sig { returns(T.nilable(Integer)) }
      attr_reader(:approver_id)

      sig { returns(Integer) }
      attr_reader(:change_seq)

      sig { returns(Integer) }
      attr_reader(:comment_count)

      sig { returns(DateTime) }
      attr_reader(:created_at)

      sig { returns(String) }
      attr_reader(:description)

      sig { returns(Integer) }
      attr_reader(:down_score)

      sig { returns(T.nilable(Float)) }
      attr_reader(:duration)

      sig { returns(Integer) }
      attr_reader(:fav_count)

      sig { returns(String) }
      attr_reader(:file_ext)

      sig { returns(Integer) }
      attr_reader(:file_size)

      sig { returns(Integer) }
      attr_reader(:id)

      sig { returns(Integer) }
      attr_reader(:image_height)

      sig { returns(Integer) }
      attr_reader(:image_width)

      sig { returns(T::Boolean) }
      attr_reader(:is_deleted)

      sig { returns(T::Boolean) }
      attr_reader(:is_flagged)

      sig { returns(T::Boolean) }
      attr_reader(:is_note_locked)

      sig { returns(T::Boolean) }
      attr_reader(:is_pending)

      sig { returns(T::Boolean) }
      attr_reader(:is_rating_locked)

      sig { returns(T::Boolean) }
      attr_reader(:is_status_locked)

      sig { returns(String) }
      attr_reader(:locked_tags)

      sig { returns(T.nilable(String)) }
      attr_reader(:md5)

      sig { returns(T.nilable(Integer)) }
      attr_reader(:parent_id)

      sig { returns(String) }
      attr_reader(:rating)

      sig { returns(Integer) }
      attr_reader(:score)

      sig { returns(T::Array[String]) }
      attr_reader(:sources)

      sig { returns(T::Array[String]) }
      attr_reader(:tags)

      sig { returns(Integer) }
      attr_reader(:up_score)

      sig { returns(T.nilable(DateTime)) }
      attr_reader(:updated_at)

      sig { returns(T.nilable(Integer)) }
      attr_reader(:uploader_id)

      sig { params(record: T::Hash[String, String]).void }
      def initialize(record)
        @record = T.let(record, T::Hash[String, String])
        @approver_id = T.let(T.must(record["approver_id"]).empty? ? nil : record["approver_id"].to_i, T.nilable(Integer))
        @change_seq = T.let(record["change_seq"].to_i, Integer)
        @comment_count = T.let(record["comment_count"].to_i, Integer)
        @created_at = T.let(DateTime.parse(record["created_at"]), DateTime)
        # Normalize CRLF line endings in free-text fields.
        @description = T.let(T.must(record["description"]).gsub("\r\n", "\n"), String)
        @down_score = T.let(record["down_score"].to_i, Integer)
        @duration = T.let(T.must(record["duration"]).empty? ? nil : record["duration"].to_f, T.nilable(Float))
        @fav_count = T.let(record["fav_count"].to_i, Integer)
        @file_ext = T.let(T.must(record["file_ext"]), String)
        @file_size = T.let(record["file_size"].to_i, Integer)
        @id = T.let(record["id"].to_i, Integer)
        @image_height = T.let(record["image_height"].to_i, Integer)
        @image_width = T.let(record["image_width"].to_i, Integer)
        @is_deleted = T.let(record["is_deleted"] == "t", T::Boolean)
        @is_flagged = T.let(record["is_flagged"] == "t", T::Boolean)
        @is_note_locked = T.let(record["is_note_locked"] == "t", T::Boolean)
        @is_pending = T.let(record["is_pending"] == "t", T::Boolean)
        @is_rating_locked = T.let(record["is_rating_locked"] == "t", T::Boolean)
        # FIX: previously read record["is_rating_locked"] (copy-paste bug),
        # so is_status_locked always mirrored is_rating_locked.
        @is_status_locked = T.let(record["is_status_locked"] == "t", T::Boolean)
        @locked_tags = T.let(T.must(record["locked_tags"]), String)
        @md5 = T.let(T.must(record["md5"]).empty? ? nil : record["md5"], T.nilable(String))
        @parent_id = T.let(T.must(record["parent_id"]).empty? ? nil : record["parent_id"].to_i, T.nilable(Integer))
        @rating = T.let(T.must(record["rating"]), String)
        @score = T.let(record["score"].to_i, Integer)
        # "source" is a newline-separated list in the export.
        @sources = T.let(T.must(record["source"]).gsub("\r\n", "\n").split("\n"), T::Array[String])
        # "tag_string" is space-separated.
        @tags = T.let(T.must(record["tag_string"]).split, T::Array[String])
        @up_score = T.let(record["up_score"].to_i, Integer)
        @updated_at = T.let(T.must(record["updated_at"]).empty? ? nil : DateTime.parse(record["updated_at"]), T.nilable(DateTime))
        @uploader_id = T.let(T.must(record["uploader_id"]).empty? ? nil : record["uploader_id"].to_i, T.nilable(Integer))
      end

      # JSON serialization of the parsed fields (raw record excluded).
      sig { params(_args: T.untyped).returns(String) }
      def to_json(*_args)
        {
          approver_id: @approver_id,
          change_seq: @change_seq,
          comment_count: @comment_count,
          created_at: @created_at,
          description: @description,
          down_score: @down_score,
          duration: @duration,
          fav_count: @fav_count,
          file_ext: @file_ext,
          file_size: @file_size,
          id: @id,
          image_height: @image_height,
          image_width: @image_width,
          is_deleted: @is_deleted,
          is_flagged: @is_flagged,
          is_note_locked: @is_note_locked,
          is_pending: @is_pending,
          is_rating_locked: @is_rating_locked,
          is_status_locked: @is_status_locked,
          locked_tags: @locked_tags,
          md5: @md5,
          parent_id: @parent_id,
          rating: @rating,
          score: @score,
          sources: @sources,
          tags: @tags,
          up_score: @up_score,
          updated_at: @updated_at,
          uploader_id: @uploader_id,
        }.to_json
      end
    end
  end
end
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
# frozen_string_literal: true
# typed: strict

module E621ExportDownloader
  module Models
    # One tag record, parsed from a single row of the tags CSV export.
    class Tag
      extend(T::Sig)

      sig { returns(String) }
      attr_reader(:category)

      sig { returns(Integer) }
      attr_reader(:id)

      sig { returns(String) }
      attr_reader(:name)

      sig { returns(Integer) }
      attr_reader(:post_count)

      sig { params(record: T::Hash[String, String]).void }
      def initialize(record)
        # Keep the raw row around alongside the parsed fields.
        @record = T.let(record, T::Hash[String, String])
        @category = T.let(T.must(record["category"]), String)
        @id = T.let(record["id"].to_i, Integer)
        @name = T.let(T.must(record["name"]), String)
        @post_count = T.let(record["post_count"].to_i, Integer)
      end

      # JSON serialization of the parsed fields (raw record excluded).
      sig { params(_args: T.untyped).returns(String) }
      def to_json(*_args)
        {
          category: category,
          id: id,
          name: name,
          post_count: post_count,
        }.to_json
      end
    end
  end
end
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
# frozen_string_literal: true
# typed: strict

module E621ExportDownloader
  module Models
    # One tag alias record, parsed from a row of the tag_aliases CSV export.
    class TagAlias
      extend(T::Sig)

      sig { returns(String) }
      attr_reader(:antecedent_name)

      sig { returns(String) }
      attr_reader(:consequent_name)

      sig { returns(T.nilable(DateTime)) }
      attr_reader(:created_at)

      sig { returns(Integer) }
      attr_reader(:id)

      sig { returns(String) }
      attr_reader(:status)

      sig { params(record: T::Hash[String, String]).void }
      def initialize(record)
        # Keep the raw row around alongside the parsed fields.
        @record = T.let(record, T::Hash[String, String])
        @antecedent_name = T.let(T.must(record["antecedent_name"]), String)
        @consequent_name = T.let(T.must(record["consequent_name"]), String)
        # created_at can be blank; map that to nil instead of raising.
        @created_at = T.let(T.must(record["created_at"]).empty? ? nil : DateTime.parse(record["created_at"]), T.nilable(DateTime))
        @id = T.let(record["id"].to_i, Integer)
        @status = T.let(T.must(record["status"]), String)
      end

      # JSON serialization of the parsed fields (raw record excluded).
      sig { params(_args: T.untyped).returns(String) }
      def to_json(*_args)
        {
          antecedent_name: antecedent_name,
          consequent_name: consequent_name,
          created_at: created_at,
          id: id,
          status: status,
        }.to_json
      end
    end
  end
end
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
# frozen_string_literal: true
# typed: strict

module E621ExportDownloader
  module Models
    # One tag implication record, parsed from a row of the tag_implications
    # CSV export. Structurally identical to TagAlias.
    class TagImplication
      extend(T::Sig)

      sig { returns(String) }
      attr_reader(:antecedent_name)

      sig { returns(String) }
      attr_reader(:consequent_name)

      sig { returns(T.nilable(DateTime)) }
      attr_reader(:created_at)

      sig { returns(Integer) }
      attr_reader(:id)

      sig { returns(String) }
      attr_reader(:status)

      sig { params(record: T::Hash[String, String]).void }
      def initialize(record)
        # Keep the raw row around alongside the parsed fields.
        @record = T.let(record, T::Hash[String, String])
        @antecedent_name = T.let(T.must(record["antecedent_name"]), String)
        @consequent_name = T.let(T.must(record["consequent_name"]), String)
        # created_at can be blank; map that to nil instead of raising.
        @created_at = T.let(T.must(record["created_at"]).empty? ? nil : DateTime.parse(record["created_at"]), T.nilable(DateTime))
        @id = T.let(record["id"].to_i, Integer)
        @status = T.let(T.must(record["status"]), String)
      end

      # JSON serialization of the parsed fields (raw record excluded).
      sig { params(_args: T.untyped).returns(String) }
      def to_json(*_args)
        {
          antecedent_name: antecedent_name,
          consequent_name: consequent_name,
          created_at: created_at,
          id: id,
          status: status,
        }.to_json
      end
    end
  end
end
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
# frozen_string_literal: true
# typed: strict

module E621ExportDownloader
  module Models
    # One wiki page record, parsed from a row of the wiki_pages CSV export.
    class WikiPage
      extend(T::Sig)

      sig { returns(String) }
      attr_reader(:body)

      sig { returns(DateTime) }
      attr_reader(:created_at)

      sig { returns(T.nilable(Integer)) }
      attr_reader(:creator_id)

      sig { returns(Integer) }
      attr_reader(:id)

      sig { returns(T::Boolean) }
      attr_reader(:is_locked)

      sig { returns(String) }
      attr_reader(:title)

      sig { returns(T.nilable(DateTime)) }
      attr_reader(:updated_at)

      sig { returns(T.nilable(Integer)) }
      attr_reader(:uploader_id)

      sig { params(record: T::Hash[String, String]).void }
      def initialize(record)
        # Keep the raw row around alongside the parsed fields.
        @record = T.let(record, T::Hash[String, String])
        # Normalize CRLF line endings in the page body.
        @body = T.let(T.must(record["body"]).gsub("\r\n", "\n"), String)
        @created_at = T.let(DateTime.parse(record["created_at"]), DateTime)
        # Blank numeric/date fields are mapped to nil.
        @creator_id = T.let(T.must(record["creator_id"]).empty? ? nil : record["creator_id"].to_i, T.nilable(Integer))
        @id = T.let(record["id"].to_i, Integer)
        # Booleans are serialized as "t"/"f" in the export.
        @is_locked = T.let(record["is_locked"] == "t", T::Boolean)
        @title = T.let(T.must(record["title"]), String)
        @updated_at = T.let(T.must(record["updated_at"]).empty? ? nil : DateTime.parse(record["updated_at"]), T.nilable(DateTime))
        @uploader_id = T.let(T.must(record["uploader_id"]).empty? ? nil : record["uploader_id"].to_i, T.nilable(Integer))
      end

      # JSON serialization of the parsed fields (raw record excluded).
      sig { params(_args: T.untyped).returns(String) }
      def to_json(*_args)
        {
          body: body,
          created_at: created_at,
          creator_id: creator_id,
          id: id,
          is_locked: is_locked,
          title: title,
          updated_at: updated_at,
          uploader_id: uploader_id,
        }.to_json
      end
    end
  end
end
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
# frozen_string_literal: true

require("zeitwerk")
require("sorbet-runtime")

# Autoload everything under this gem's lib/ directory, except the
# hand-written version file.
Zeitwerk::Loader.for_gem.tap do |loader|
  loader.ignore("#{__dir__}/e621_export_downloader/version.rb")
  loader.setup
end

module E621ExportDownloader
  # Raised when an export cannot be located, downloaded, or resolved.
  class ResolveError < StandardError; end
end
|
data/sorbet/config
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
**/*.rbi linguist-vendored=true
|