bas 0.4.0 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +12 -0
- data/README.md +68 -147
- data/lib/bas/bot/base.rb +74 -0
- data/lib/bas/bot/compare_wip_limit_count.rb +92 -0
- data/lib/bas/bot/fetch_birthdays_from_notion.rb +128 -0
- data/lib/bas/bot/fetch_domains_wip_counts_from_notion.rb +121 -0
- data/lib/bas/bot/fetch_domains_wip_limit_from_notion.rb +134 -0
- data/lib/bas/bot/fetch_emails_from_imap.rb +99 -0
- data/lib/bas/bot/fetch_next_week_birthdays_from_notion.rb +142 -0
- data/lib/bas/bot/fetch_next_week_ptos_from_notion.rb +162 -0
- data/lib/bas/bot/fetch_ptos_from_notion.rb +138 -0
- data/lib/bas/bot/format_birthdays.rb +97 -0
- data/lib/bas/bot/format_emails.rb +124 -0
- data/lib/bas/bot/format_wip_limit_exceeded.rb +97 -0
- data/lib/bas/bot/garbage_collector.rb +85 -0
- data/lib/bas/bot/humanize_pto.rb +119 -0
- data/lib/bas/bot/notify_discord.rb +96 -0
- data/lib/bas/read/base.rb +10 -23
- data/lib/bas/read/default.rb +16 -0
- data/lib/bas/read/postgres.rb +44 -0
- data/lib/bas/read/types/response.rb +18 -0
- data/lib/bas/utils/discord/integration.rb +43 -0
- data/lib/bas/utils/exceptions/function_not_implemented.rb +16 -0
- data/lib/bas/utils/exceptions/invalid_process_response.rb +16 -0
- data/lib/bas/utils/imap/request.rb +76 -0
- data/lib/bas/utils/notion/request.rb +45 -0
- data/lib/bas/utils/openai/run_assistant.rb +99 -0
- data/lib/bas/utils/postgres/request.rb +50 -0
- data/lib/bas/version.rb +1 -1
- data/lib/bas/write/base.rb +12 -17
- data/lib/bas/write/postgres.rb +45 -0
- data/lib/bas/write/postgres_update.rb +49 -0
- data/lib/bas.rb +1 -3
- metadata +30 -67
- data/lib/bas/domain/birthday.rb +0 -25
- data/lib/bas/domain/email.rb +0 -34
- data/lib/bas/domain/exceptions/function_not_implemented.rb +0 -18
- data/lib/bas/domain/issue.rb +0 -22
- data/lib/bas/domain/notification.rb +0 -23
- data/lib/bas/domain/pto.rb +0 -69
- data/lib/bas/domain/work_items_limit.rb +0 -25
- data/lib/bas/formatter/base.rb +0 -53
- data/lib/bas/formatter/birthday.rb +0 -38
- data/lib/bas/formatter/exceptions/invalid_data.rb +0 -15
- data/lib/bas/formatter/notification.rb +0 -34
- data/lib/bas/formatter/pto.rb +0 -89
- data/lib/bas/formatter/support_emails.rb +0 -73
- data/lib/bas/formatter/types/response.rb +0 -16
- data/lib/bas/formatter/work_items_limit.rb +0 -68
- data/lib/bas/process/base.rb +0 -39
- data/lib/bas/process/discord/exceptions/invalid_webhook_token.rb +0 -16
- data/lib/bas/process/discord/implementation.rb +0 -71
- data/lib/bas/process/discord/types/response.rb +0 -22
- data/lib/bas/process/openai/base.rb +0 -72
- data/lib/bas/process/openai/helper.rb +0 -19
- data/lib/bas/process/openai/types/response.rb +0 -27
- data/lib/bas/process/openai/use_case/humanize_pto.rb +0 -53
- data/lib/bas/process/slack/exceptions/invalid_webhook_token.rb +0 -16
- data/lib/bas/process/slack/implementation.rb +0 -70
- data/lib/bas/process/slack/types/response.rb +0 -21
- data/lib/bas/process/types/response.rb +0 -16
- data/lib/bas/read/github/base.rb +0 -57
- data/lib/bas/read/github/types/response.rb +0 -27
- data/lib/bas/read/github/use_case/repo_issues.rb +0 -17
- data/lib/bas/read/imap/base.rb +0 -70
- data/lib/bas/read/imap/types/response.rb +0 -27
- data/lib/bas/read/imap/use_case/support_emails.rb +0 -26
- data/lib/bas/read/notion/base.rb +0 -52
- data/lib/bas/read/notion/exceptions/invalid_api_key.rb +0 -15
- data/lib/bas/read/notion/exceptions/invalid_database_id.rb +0 -15
- data/lib/bas/read/notion/helper.rb +0 -21
- data/lib/bas/read/notion/types/response.rb +0 -26
- data/lib/bas/read/notion/use_case/birthday_next_week.rb +0 -41
- data/lib/bas/read/notion/use_case/birthday_today.rb +0 -29
- data/lib/bas/read/notion/use_case/notification.rb +0 -28
- data/lib/bas/read/notion/use_case/pto_next_week.rb +0 -71
- data/lib/bas/read/notion/use_case/pto_today.rb +0 -30
- data/lib/bas/read/notion/use_case/work_items_limit.rb +0 -37
- data/lib/bas/read/postgres/base.rb +0 -46
- data/lib/bas/read/postgres/helper.rb +0 -16
- data/lib/bas/read/postgres/types/response.rb +0 -42
- data/lib/bas/read/postgres/use_case/pto_today.rb +0 -32
- data/lib/bas/serialize/base.rb +0 -30
- data/lib/bas/serialize/github/issues.rb +0 -57
- data/lib/bas/serialize/imap/support_emails.rb +0 -56
- data/lib/bas/serialize/notion/birthday_today.rb +0 -68
- data/lib/bas/serialize/notion/notification.rb +0 -56
- data/lib/bas/serialize/notion/pto_today.rb +0 -75
- data/lib/bas/serialize/notion/work_items_limit.rb +0 -65
- data/lib/bas/serialize/postgres/pto_today.rb +0 -47
- data/lib/bas/use_cases/types/config.rb +0 -20
- data/lib/bas/use_cases/use_case.rb +0 -42
- data/lib/bas/use_cases/use_cases.rb +0 -465
- data/lib/bas/write/logs/base.rb +0 -33
- data/lib/bas/write/logs/use_case/console_log.rb +0 -22
- data/lib/bas/write/notion/base.rb +0 -36
- data/lib/bas/write/notion/use_case/empty_notification.rb +0 -38
- data/lib/bas/write/notion/use_case/notification.rb +0 -38
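The five hunks below are the new bot classes that replace the 0.4.x read/format/process/write layers removed above. Each bot subclasses Bot::Base (data/lib/bas/bot/base.rb, added in this release but not shown in this excerpt) and implements the same three hooks: read pulls a record from shared storage (or a no-op Read::Default), process calls an external service, and write persists the result to a Postgres table. As a minimal sketch of how a bot is wired up, assuming execute simply runs those hooks in order (the base class body is not part of this excerpt) and reusing the option keys from the in-code examples:

    require "bas/bot/fetch_next_week_ptos_from_notion"   # load path assumed from the gem layout

    options = {
      process_options: { database_id: "notion database id", secret: "notion secret" },
      write_options: {
        connection: { host: "host", port: 5432, dbname: "bas", user: "postgres", password: "postgres" },
        db_table: "pto",
        tag: "FetchNextWeekPtosFromNotion"
      }
    }

    # execute is expected to call read, then process, then write
    Bot::FetchNextWeekPtosFromNotion.new(options).execute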
data/lib/bas/bot/fetch_domains_wip_counts_from_notion.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require_relative "./base"
+require_relative "../read/default"
+require_relative "../utils/notion/request"
+require_relative "../write/postgres"
+
+module Bot
+  ##
+  # The Bot::FetchDomainsWipCountsFromNotion class serves as a bot implementation to fetch work items
+  # in progress or in hold from a Notion database, count how many are by domain, and write them on a
+  # PostgresDB table with a specific format.
+  #
+  # <br>
+  # <b>Example</b>
+  #
+  #   options = {
+  #     process_options: {
+  #       database_id: "notion database id",
+  #       secret: "notion secret"
+  #     },
+  #     write_options: {
+  #       connection: {
+  #         host: "host",
+  #         port: 5432,
+  #         dbname: "bas",
+  #         user: "postgres",
+  #         password: "postgres"
+  #       },
+  #       db_table: "use_cases",
+  #       tag: "FetchDomainsWipCountsFromNotion"
+  #     }
+  #   }
+  #
+  #   bot = Bot::FetchDomainsWipCountsFromNotion.new(options)
+  #   bot.execute
+  #
+  class FetchDomainsWipCountsFromNotion < Bot::Base
+    # Read function to execute the default Read component
+    #
+    def read
+      reader = Read::Default.new
+
+      reader.execute
+    end
+
+    # Process function to execute the Notion utility to fetch work item from the notion database
+    #
+    def process
+      response = Utils::Notion::Request.execute(params)
+
+      if response.code == 200
+        work_items_domains = normalize_response(response.parsed_response["results"])
+        domain_wip_count = count_domain_items(work_items_domains)
+
+        { success: { domain_wip_count: } }
+      else
+        { error: { message: response.parsed_response, status_code: response.code } }
+      end
+    end
+
+    # Write function to execute the PostgresDB write component
+    #
+    def write
+      write = Write::Postgres.new(write_options, process_response)
+
+      write.execute
+    end
+
+    private
+
+    def params
+      {
+        endpoint: "databases/#{process_options[:database_id]}/query",
+        secret: process_options[:secret],
+        method: "post",
+        body:
+      }
+    end
+
+    def body
+      {
+        filter: {
+          "and": [
+            { property: "OK", formula: { string: { contains: "✅" } } },
+            { "or": status_conditions }
+          ]
+        }
+      }
+    end
+
+    def status_conditions
+      [
+        { property: "Status", status: { equals: "In Progress" } },
+        { property: "Status", status: { equals: "On Hold" } }
+      ]
+    end
+
+    def normalize_response(results)
+      return [] if results.nil?
+
+      results.map do |pto|
+        work_item_fields = pto["properties"]
+
+        {
+          "domain" => extract_domain_field_value(work_item_fields["Responsible domain"])
+        }
+      end
+    end
+
+    def extract_domain_field_value(data)
+      data["select"]["name"]
+    end
+
+    def count_domain_items(work_items_list)
+      domain_work_items = work_items_list.group_by { |work_item| work_item["domain"] }
+
+      domain_work_items.transform_values(&:count)
+    end
+  end
+end
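The domain tally at the bottom of this class is plain Enumerable work: group_by collects the normalized items by their "domain" key and transform_values(&:count) turns each group into its size. A quick worked example with made-up domain names:

    items = [{ "domain" => "Ops" }, { "domain" => "Ops" }, { "domain" => "Web" }]
    items.group_by { |item| item["domain"] }
    # => { "Ops" => [{ "domain" => "Ops" }, { "domain" => "Ops" }], "Web" => [{ "domain" => "Web" }] }
    items.group_by { |item| item["domain"] }.transform_values(&:count)
    # => { "Ops" => 2, "Web" => 1 }

This hash is what ends up under the success key and, through the write step, in the Postgres table that FetchDomainsWipLimitFromNotion (next hunk) reads back.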
data/lib/bas/bot/fetch_domains_wip_limit_from_notion.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require_relative "./base"
+require_relative "../read/postgres"
+require_relative "../utils/notion/request"
+require_relative "../write/postgres"
+
+module Bot
+  ##
+  # The Bot::FetchDomainsWipLimitFromNotion class serves as a bot implementation to fetch domains wip
+  # limits from a Notion database, merge them with the count of how many are by domain, and write them
+  # on a PostgresDB table with a specific format.
+  #
+  # <br>
+  # <b>Example</b>
+  #
+  #   options = {
+  #     read_options: {
+  #       connection: {
+  #         host: "host",
+  #         port: 5432,
+  #         dbname: "bas",
+  #         user: "postgres",
+  #         password: "postgres"
+  #       },
+  #       db_table: "use_cases",
+  #       tag: "FetchDomainsWipCountsFromNotion"
+  #     },
+  #     process_options: {
+  #       database_id: "notion database id",
+  #       secret: "notion secret"
+  #     },
+  #     write_options: {
+  #       connection: {
+  #         host: "host",
+  #         port: 5432,
+  #         dbname: "bas",
+  #         user: "postgres",
+  #         password: "postgres"
+  #       },
+  #       db_table: "use_cases",
+  #       tag: "FetchDomainsWipLimitFromNotion"
+  #     }
+  #   }
+  #
+  #   bot = Bot::FetchDomainsWipLimitFromNotion.new(options)
+  #   bot.execute
+  #
+  class FetchDomainsWipLimitFromNotion < Bot::Base
+    # read function to execute the PostgresDB Read component
+    #
+    def read
+      reader = Read::Postgres.new(read_options.merge(conditions))
+
+      reader.execute
+    end
+
+    # Process function to execute the Notion utility to fetch domain wip limits from the notion database
+    #
+    def process
+      response = Utils::Notion::Request.execute(params)
+
+      if response.code == 200
+        domains_limits = normalize_response(response.parsed_response["results"])
+
+        wip_limit_data = wip_count.merge({ domains_limits: })
+
+        { success: wip_limit_data }
+      else
+        { error: { message: response.parsed_response, status_code: response.code } }
+      end
+    end
+
+    # Write function to execute the PostgresDB write component
+    #
+    def write
+      write = Write::Postgres.new(write_options, process_response)
+
+      write.execute
+    end
+
+    private
+
+    def conditions
+      {
+        where: "archived=$1 AND tag=$2 AND stage=$3 ORDER BY inserted_at ASC",
+        params: [false, read_options[:tag], "unprocessed"]
+      }
+    end
+
+    def params
+      {
+        endpoint: "databases/#{process_options[:database_id]}/query",
+        secret: process_options[:secret],
+        method: "post",
+        body:
+      }
+    end
+
+    def body
+      {
+        filter: {
+          property: "WIP + On Hold limit",
+          number: { is_not_empty: true }
+        }
+      }
+    end
+
+    def normalize_response(results)
+      return [] if results.nil?
+
+      results.reduce({}) do |domains_limits, domain_wip_limit|
+        domain_fields = domain_wip_limit["properties"]
+
+        domain = extract_domain_name_value(domain_fields["Name"])
+        limit = extract_domain_limit_value(domain_fields["WIP + On Hold limit"])
+
+        domains_limits.merge({ domain => limit })
+      end
+    end
+
+    def extract_domain_name_value(data)
+      data["title"].first["plain_text"]
+    end
+
+    def extract_domain_limit_value(data)
+      data["number"]
+    end
+
+    def wip_count
+      read_response.data.nil? ? {} : read_response.data
+    end
+  end
+end
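This bot is designed to run after FetchDomainsWipCountsFromNotion: its read_options point at the same table and reuse that bot's tag, so read_response.data carries the counts written in the previous step. Assuming the write component stores the success payload as-is (the Write::Postgres source is not shown here), the merge in process would produce a record shaped roughly like this:

    wip_count      = { "domain_wip_count" => { "Ops" => 4, "Web" => 2 } }  # read back from Postgres; shape assumed
    domains_limits = { "Ops" => 5, "Web" => 3 }                            # from the "WIP + On Hold limit" query

    wip_count.merge({ domains_limits: })
    # => { "domain_wip_count" => { "Ops" => 4, "Web" => 2 }, domains_limits: { "Ops" => 5, "Web" => 3 } }

CompareWipLimitCount (also added in this release) is presumably the consumer of that combined record.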
data/lib/bas/bot/fetch_emails_from_imap.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require_relative "./base"
+require_relative "../read/default"
+require_relative "../utils/imap/request"
+require_relative "../write/postgres"
+
+module Bot
+  ##
+  # The Bot::FetchEmailsFromImap class serves as a bot implementation to fetch emails from a imap server
+  # and write them on a PostgresDB table with a specific format.
+  #
+  # <br>
+  # <b>Example</b>
+  #
+  #   params = {
+  #     process_options: {
+  #       refresh_token: "email server refresh token",
+  #       client_id: "email server client it",
+  #       client_secret: "email server client secret",
+  #       token_uri: "email server refresh token uri",
+  #       email_domain: "email server domain",
+  #       email_port: "email server port",
+  #       user_email: "email to be access",
+  #       search_email: "email to be search",
+  #       inbox: "inbox to be search"
+  #     },
+  #     write_options: {
+  #       connection:,
+  #       db_table: "use_cases",
+  #       tag: "FetchEmailsFromImap"
+  #     }
+  #   }
+  #
+  #   bot = Bot::FetchEmailsFromImap.new(options)
+  #   bot.execute
+  #
+  class FetchEmailsFromImap < Bot::Base
+    # Read function to execute the default Read component
+    #
+    def read
+      reader = Read::Default.new
+
+      reader.execute
+    end
+
+    # Process function to request email from an imap server using the imap utility
+    #
+    def process
+      response = Utils::Imap::Request.new(process_options, query).execute
+
+      if response[:error]
+        { error: response }
+      else
+        emails = normalize_response(response[:emails])
+
+        { success: { emails: } }
+      end
+    end
+
+    # Write function to execute the PostgresDB write component
+    #
+    def write
+      write = Write::Postgres.new(write_options, process_response)
+
+      write.execute
+    end
+
+    private
+
+    def query
+      yesterday = (Time.now - (60 * 60 * 24)).strftime("%d-%b-%Y")
+
+      ["TO", process_options[:search_email], "SINCE", yesterday]
+    end
+
+    def normalize_response(results)
+      return [] if results.nil?
+
+      results.map do |value|
+        message = value[:message]
+
+        {
+          "message_id": value[:message_id],
+          "sender" => extract_sender(message),
+          "date" => message.date,
+          "subject" => message.subject
+        }
+      end
+    end
+
+    def extract_sender(value)
+      mailbox = value.sender[0]["mailbox"]
+      host = value.sender[0]["host"]
+
+      "#{mailbox}@#{host}"
+    end
+  end
+end
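The private query helper builds an IMAP SEARCH criteria array restricted to mail addressed to search_email and received since the previous day; Utils::Imap::Request (whose source is not part of this hunk) presumably hands it to Net::IMAP#search, which accepts exactly this key/value array form. Evaluated on an arbitrary date it looks like:

    # run on 2024-05-10 (dates illustrative)
    yesterday = (Time.now - (60 * 60 * 24)).strftime("%d-%b-%Y")  # => "09-May-2024"
    ["TO", "support@example.com", "SINCE", yesterday]
    # => ["TO", "support@example.com", "SINCE", "09-May-2024"]

Note that normalize_response mixes a symbol key ("message_id":) with string keys for the other fields, so consumers of the stored record see :message_id alongside "sender", "date", and "subject".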
data/lib/bas/bot/fetch_next_week_birthdays_from_notion.rb
@@ -0,0 +1,142 @@
+# frozen_string_literal: true
+
+require_relative "./base"
+require_relative "../read/postgres"
+require_relative "../utils/notion/request"
+require_relative "../write/postgres"
+
+module Bot
+  ##
+  # The Bot::FetchNextWeekBirthdaysFromNotion class serves as a bot implementation to read next
+  # week birthdays from a notion database and write them on a PostgresDB table with a specific format.
+  #
+  # <br>
+  # <b>Example</b>
+  #
+  #   options = {
+  #     process_options: {
+  #       database_id: "notion database id",
+  #       secret: "notion secret"
+  #     },
+  #     write_options: {
+  #       connection: {
+  #         host: "host",
+  #         port: 5432,
+  #         dbname: "bas",
+  #         user: "postgres",
+  #         password: "postgres"
+  #       },
+  #       db_table: "birthdays",
+  #       tag: "FetchNextWeekBirthdaysFromNotion"
+  #     }
+  #   }
+  #
+  #   bot = Bot::FetchNextWeekBirthdaysFromNotion.new(options)
+  #   bot.execute
+  #
+  class FetchNextWeekBirthdaysFromNotion < Bot::Base
+    DAYS_BEFORE = 7
+
+    # read function to execute the PostgresDB Read component
+    #
+    def read
+      reader = Read::Postgres.new(read_options.merge(conditions))
+
+      reader.execute
+    end
+
+    # Process function to execute the Notion utility to fetch PTO's from the notion database
+    #
+    def process
+      response = Utils::Notion::Request.execute(params)
+
+      if response.code == 200
+        birthdays_list = normalize_response(response.parsed_response["results"])
+
+        { success: { birthdays: birthdays_list } }
+      else
+        { error: { message: response.parsed_response, status_code: response.code } }
+      end
+    end
+
+    # Write function to execute the PostgresDB write component
+    #
+    def write
+      write = Write::Postgres.new(write_options, process_response)
+
+      write.execute
+    end
+
+    private
+
+    def conditions
+      {
+        where: "archived=$1 AND tag=$2 AND stage=$3 ORDER BY inserted_at ASC",
+        params: [false, read_options[:tag], "unprocessed"]
+      }
+    end
+
+    def params
+      {
+        endpoint: "databases/#{process_options[:database_id]}/query",
+        secret: process_options[:secret],
+        method: "post",
+        body:
+      }
+    end
+
+    def body
+      {
+        filter: {
+          and: [{ property: "BD_this_year", date: { equals: n_days_from_now } }] + last_edited_condition
+        }
+      }
+    end
+
+    def last_edited_condition
+      return [] if read_response.inserted_at.nil?
+
+      [
+        {
+          timestamp: "last_edited_time",
+          last_edited_time: { on_or_after: read_response.inserted_at }
+        }
+      ]
+    end
+
+    def n_days_from_now
+      date = Time.now.utc + days_in_second(days_before)
+
+      date.utc.strftime("%F").to_s
+    end
+
+    def days_before
+      process_options[:days_before] || DAYS_BEFORE
+    end
+
+    def days_in_second(days)
+      days * 24 * 60 * 60
+    end
+
+    def normalize_response(results)
+      return [] if results.nil?
+
+      results.map do |value|
+        birthday_fields = value["properties"]
+
+        {
+          "name" => extract_rich_text_field_value(birthday_fields["Complete Name"]),
+          "birthday_date" => extract_date_field_value(birthday_fields["BD_this_year"])
+        }
+      end
+    end
+
+    def extract_rich_text_field_value(data)
+      data["rich_text"][0]["plain_text"]
+    end
+
+    def extract_date_field_value(data)
+      data["formula"]["date"]["start"]
+    end
+  end
+end
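With the default DAYS_BEFORE of 7 and no previously processed record (so last_edited_condition returns an empty array), the request body collapses to a single date-equality filter on the BD_this_year formula property, one week ahead of the current UTC date. For example:

    # run on 2024-05-10 UTC with read_response.inserted_at nil (dates illustrative)
    n_days_from_now  # => "2024-05-17"
    body
    # => { filter: { and: [{ property: "BD_this_year", date: { equals: "2024-05-17" } }] } }

When a previous run exists, the extra last_edited_time condition keeps the query incremental, returning only pages edited on or after the stored inserted_at timestamp.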
data/lib/bas/bot/fetch_next_week_ptos_from_notion.rb
@@ -0,0 +1,162 @@
+# frozen_string_literal: true
+
+require_relative "./base"
+require_relative "../read/default"
+require_relative "../utils/notion/request"
+require_relative "../write/postgres"
+
+module Bot
+  ##
+  # The Bot::FetchNextWeekPtosFromNotion class serves as a bot implementation to read next week
+  # PTO's from a notion database and write them on a PostgresDB table with a specific format.
+  #
+  # <br>
+  # <b>Example</b>
+  #
+  #   options = {
+  #     process_options: {
+  #       database_id: "notion database id",
+  #       secret: "notion secret"
+  #     },
+  #     write_options: {
+  #       connection: {
+  #         host: "host",
+  #         port: 5432,
+  #         dbname: "bas",
+  #         user: "postgres",
+  #         password: "postgres"
+  #       },
+  #       db_table: "pto",
+  #       tag: "FetchNextWeekPtosFromNotion"
+  #     }
+  #   }
+  #
+  #   bot = Bot::FetchNextWeekPtosFromNotion.new(options)
+  #   bot.execute
+  #
+  class FetchNextWeekPtosFromNotion < Bot::Base
+    # Read function to execute the default Read component
+    #
+    def read
+      reader = Read::Default.new
+
+      reader.execute
+    end
+
+    # Process function to execute the Notion utility to fetch next week PTO's from the notion database
+    #
+    def process
+      response = Utils::Notion::Request.execute(params)
+
+      if response.code == 200
+        ptos_list = normalize_response(response.parsed_response["results"])
+
+        { success: { ptos: ptos_list } }
+      else
+        { error: { message: response.parsed_response, status_code: response.code } }
+      end
+    end
+
+    # Write function to execute the PostgresDB write component
+    #
+    def write
+      write = Write::Postgres.new(write_options, process_response)
+
+      write.execute
+    end
+
+    private
+
+    def params
+      {
+        endpoint: "databases/#{process_options[:database_id]}/query",
+        secret: process_options[:secret],
+        method: "post",
+        body:
+      }
+    end
+
+    def body
+      monday, sunday = next_week_dates
+
+      {
+        filter: {
+          or: [
+            belong_next_week("StartDateTime", monday, sunday),
+            belong_next_week("EndDateTime", monday, sunday),
+            cover_next_week(monday, sunday)
+          ]
+        }
+      }
+    end
+
+    def next_week_dates
+      monday = next_week_monday
+      sunday = monday + 6
+
+      [monday, sunday]
+    end
+
+    def next_week_monday
+      today = Date.today
+      week_day = today.wday
+
+      days = week_day.zero? ? 1 : 8 - week_day
+
+      today + days
+    end
+
+    def belong_next_week(property, after_day, before_day)
+      {
+        and: [
+          { property:, date: { on_or_after: after_day } },
+          { property:, date: { on_or_before: before_day } }
+        ]
+      }
+    end
+
+    def cover_next_week(monday, sunday)
+      {
+        and: [
+          { property: "EndDateTime", date: { on_or_after: sunday } },
+          { property: "StartDateTime", date: { on_or_before: monday } }
+        ]
+      }
+    end
+
+    def normalize_response(results)
+      return [] if results.nil?
+
+      results.map do |pto|
+        pto_fields = pto["properties"]
+
+        {
+          "Name" => extract_description_field_value(pto_fields["Description"]),
+          "StartDateTime" => extract_date_field_value(pto_fields["StartDateTime"]),
+          "EndDateTime" => extract_date_field_value(pto_fields["EndDateTime"])
+        }
+      end
+    end
+
+    def extract_description_field_value(data)
+      names = data["title"].map { |name| name["plain_text"] }
+
+      names.join(" ")
+    end
+
+    def extract_date_field_value(date)
+      {
+        from: extract_start_date(date),
+        to: extract_end_date(date)
+      }
+    end
+
+    def extract_start_date(data)
+      data["date"]["start"]
+    end
+
+    def extract_end_date(data)
+      data["date"]["end"]
+    end
+  end
+end
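next_week_monday relies on Date#wday, which numbers Sunday as 0 through Saturday as 6: days is 1 on a Sunday and 8 - wday on any other day, so the result is always a Monday, and sunday is simply monday + 6. A quick check of the arithmetic:

    require "date"

    friday = Date.new(2024, 5, 10)   # wday == 5
    friday + (8 - friday.wday)       # => Monday, 2024-05-13

    sunday = Date.new(2024, 5, 12)   # wday == 0
    sunday + 1                       # => Monday, 2024-05-13 (the same Monday)

The or filter in body then matches PTOs that start during that week, end during it, or span it entirely (start on or before the Monday and end on or after the Sunday).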