coauthor 0.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of coauthor might be problematic; see the registry's advisory page for more details.

coauthor/__init__.py ADDED
@@ -0,0 +1,5 @@
1
+ """C2 Platform Coauthor Package"""
2
+
3
+ from __future__ import annotations
4
+
5
+ __version__ = "0.0.5"
coauthor/main.py ADDED
@@ -0,0 +1,81 @@
1
+ """
2
+ This module serves as the entry point for processing command-line arguments
3
+ related to configuration and workflow initialization. It configures logging,
4
+ retrieves configuration details, and initializes workflows based on the parsed
5
+ command-line arguments.
6
+ """
7
+
8
+ import argparse
9
+ import logging
10
+ import os
11
+ from coauthor.utils.logger import Logger
12
+ from coauthor.utils.config import get_config
13
+ from coauthor.modules.workflow import initialize_workflows
14
+
15
+
16
def main():
    """Parse command-line arguments, configure logging, and run workflows.

    Command-line arguments:
        --config_path: Path to the configuration file.
        --profile: Name of profile (e.g. obsidian, python, hugo).
        --watch: Flag to enable watch mode.
        --scan: Flag to enable scan mode.
        --debug: Flag to set log level to DEBUG.
        --log_file: Path to the log file.
    """
    parser = argparse.ArgumentParser(description="Process arguments to select steps")
    parser.add_argument(
        "--config_path",
        type=str,
        help="Path to the configuration file",
    )
    parser.add_argument(
        "--profile",
        type=str,
        help="Name of profile e.g. obsidian, python, hugo",
    )
    parser.add_argument(
        "--watch",
        action="store_true",
        help="Flag to enable watch mode",
    )
    parser.add_argument(
        "--scan",
        action="store_true",
        help="Flag to enable scan mode",
    )
    parser.add_argument(
        "--debug",
        action="store_true",
        help="Flag to set log level to DEBUG",
    )
    parser.add_argument(
        "--log_file",
        type=str,
        help="Path to the log file",
    )
    args = parser.parse_args()

    log_level = logging.DEBUG if args.debug else logging.INFO
    if args.log_file:
        log_file = args.log_file
    else:
        # os.path.join instead of f-string concatenation for portability.
        log_file = os.path.join(os.getcwd(), "coauthor.log")
    logger = Logger(__name__, log_file=log_file, level=log_level)
    config = get_config(logger=logger, args=args)
    # Keep the parsed CLI namespace available to downstream workflow code.
    config["args"] = args
    initialize_workflows(config, logger)


if __name__ == "__main__":
    main()
@@ -0,0 +1,3 @@
1
+ """C2 Platform Coauthor Modules Package"""
2
+
3
+ from __future__ import annotations
coauthor/modules/ai.py ADDED
@@ -0,0 +1,177 @@
1
+ import os
2
+ import traceback
3
+ import yaml
4
+ import datetime
5
+ from openai import OpenAI
6
+ from coauthor.utils.match_utils import file_submit_to_ai, path_new_replace
7
+ from coauthor.utils.jinja import render_template, template_exists
8
+
9
+
10
def load_system_message(agent_system_path, logger):
    """Load the agent's system message from a file.

    Args:
        agent_system_path: Path to the file holding the system message.
        logger: Logger used for progress and error reporting.

    Returns:
        The file contents as a string.

    Raises:
        FileNotFoundError: If ``agent_system_path`` does not exist.
        OSError: If the file exists but cannot be read.
    """
    logger.info(f"System message from file {agent_system_path}")
    # Guard clause: fail fast on a missing file before attempting any I/O.
    if not os.path.exists(agent_system_path):
        raise FileNotFoundError(f"The system path {agent_system_path} does not exist")
    try:
        with open(agent_system_path, "r", encoding="utf-8") as file:
            system_message = file.read()
        logger.info(f"Loaded system message from file: {agent_system_path}")
        return system_message
    except Exception as error:
        logger.error(f"Error reading system message file: {error}")
        raise
24
+
25
+
26
def write_response_to_yaml(config, messages, model, response, logger, duration=None):
    """Write an AI exchange to per-message markdown files and a YAML log.

    Logging only happens when the COAUTHOR_AI_LOG_DIR environment variable
    is set; otherwise the function returns immediately.

    Args:
        config: Workflow configuration; reads "current-task" and
            "current-workflow" and stores the YAML log path under the task's
            "ai-log-file" key.
        messages: Chat messages as dicts with "role" and "content" keys.
        model: Name of the model that produced the response.
        response: The model's response text.
        logger: Logger for progress and error reporting.
        duration: Optional request duration recorded alongside the response.

    Raises:
        OSError: If the YAML file cannot be written.
    """
    task = config["current-task"]
    workflow = config["current-workflow"]
    coauthor_ai_log_dir = os.getenv("COAUTHOR_AI_LOG_DIR")
    if not coauthor_ai_log_dir:
        logger.debug("Environment variable COAUTHOR_AI_LOG_DIR not set")
        return

    # Monotonically increasing id persisted in a counter file so successive
    # prompts get distinct file names.
    # NOTE(review): this read-modify-write is not protected against concurrent
    # writers — confirm single-process usage.
    counter_file_path = os.path.join(coauthor_ai_log_dir, ".ai-prompt-counter")
    if not os.path.exists(counter_file_path):
        message_id = 1
        with open(counter_file_path, "w", encoding="utf-8") as counter_file:
            counter_file.write(str(message_id))
    else:
        with open(counter_file_path, "r+", encoding="utf-8") as counter_file:
            message_id = int(counter_file.read())
            message_id += 1
            counter_file.seek(0)
            counter_file.write(str(message_id))

    # Create directory path for markdown files
    markdown_dir = os.path.join(coauthor_ai_log_dir, f"{workflow['name']}/{task['id']}")
    os.makedirs(markdown_dir, exist_ok=True)

    # Write each message to an individual markdown file
    for msg in messages:
        md_file_path = os.path.join(markdown_dir, f"{message_id}-{msg['role']}.md")
        with open(md_file_path, "w", encoding="utf-8") as md_file:
            md_file.write(msg["content"])
        logger.info(f"Message written to Markdown file: {md_file_path}")

    yaml_file_path = os.path.join(markdown_dir, f"{message_id}.yml")
    # Expose the log location to later workflow tasks via the task dict.
    task["ai-log-file"] = yaml_file_path

    data = {
        "messages": [{"role": msg["role"], "tokens": len(msg["content"].split())} for msg in messages],  # Count words
        "model": model,
        "response": response,
        "task": task,
        "date": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
        "duration": duration,  # Add the duration to the data
    }
    try:
        with open(yaml_file_path, "w", encoding="utf-8") as file:
            yaml.dump(data, file, default_flow_style=False, allow_unicode=True)
        logger.info(f"Response written to YAML file: {yaml_file_path}")
    except Exception as error:
        logger.error(f"Error writing response to YAML file: {error}")
        raise
76
+
77
+
78
def create_chat_completion(config, client, messages, logger):
    """Create a chat completion using the OpenAI client.

    Sends *messages* to the model configured under ``config["agent"]["model"]``,
    logs the exchange via ``write_response_to_yaml``, strips a surrounding
    markdown code fence from the reply if present, and returns the text.

    Args:
        config: Workflow configuration; reads ``config["agent"]["model"]``.
        client: An initialized OpenAI client.
        messages: Chat messages as dicts with "role" and "content" keys.
        logger: Logger for error reporting.

    Returns:
        The (possibly unfenced) response text.

    Raises:
        Exception: Re-raises any error from the API call after logging it.
    """
    model = config["agent"]["model"]
    try:
        start_time = datetime.datetime.now()  # Record start time
        response = client.chat.completions.create(
            messages=messages,
            model=model,
        )
        end_time = datetime.datetime.now()  # Record end time
        # total_seconds() keeps the full elapsed time; timedelta.seconds would
        # drop the days component and truncate sub-second precision.
        duration = (end_time - start_time).total_seconds()

        content = response.choices[0].message.content.strip()

        write_response_to_yaml(config, messages, model, content, logger, duration)

        # Unwrap a reply that was fenced as a markdown code block (``` ... ```).
        if content.startswith("```") and content.endswith("```"):
            content_lines = content.splitlines()[1:-1]
            content = "\n".join(content_lines).strip()
        return content

    except Exception as error:
        logger.error(f"Error creating chat completion: {error}")
        logger.error(traceback.format_exc())
        raise
103
+
104
+
105
def prompt_template_path(config, filename):
    """Return the prompt-template path for the current workflow task.

    The path has the shape ``<workflow-name>/<task-id>/<filename>`` so each
    task can carry its own ``system.md`` / ``user.md`` templates.

    Args:
        config: Workflow configuration; reads "current-task" and
            "current-workflow".
        filename: Template file name, e.g. "user.md" or "system.md".
    """
    task = config["current-task"]
    workflow = config["current-workflow"]
    # Interpolate the requested filename; a fixed literal here would make the
    # system and user templates resolve to the same path.
    return f"{workflow['name']}/{task['id']}/{filename}"
109
+
110
+
111
def user_template_path(config, filename="user.md"):
    """Return the template path for the current task's user prompt."""
    return prompt_template_path(config, filename)
113
+
114
+
115
def system_template_path(config, filename="system.md"):
    """Return the template path for the current task's system prompt."""
    return prompt_template_path(config, filename)
117
+
118
+
119
def process_with_openai_agent(config, logger):
    """Submit content to OpenAI API for processing.

    Builds the system and user messages for the current task — from a
    template when one exists, from the task's own "system"/"user" keys
    otherwise, or (for the user message only) from the file that triggered
    the modify event — then sends them to the configured model.

    Args:
        config: Workflow configuration; reads "current-task" and "agent".
        logger: Logger for progress reporting.

    Returns:
        The model's response text.

    Raises:
        ValueError: If neither a template nor a task key provides a message.
    """
    task = config["current-task"]
    logger.info(f"Processing content using AI for task {task['id']}")
    agent = config["agent"]
    logger.debug(f"agent: {agent}")

    # Credentials and endpoint may be given literally ("api_key"/"api_url")
    # or indirectly as the name of an environment variable to read.
    if "api_key" in agent:
        api_key = agent["api_key"]
    else:
        api_key = os.getenv(agent["api_key_var"])
    if "api_url" in agent:
        api_url = agent["api_url"]
    else:
        api_url = os.getenv(agent["api_url_var"])

    client = OpenAI(
        api_key=api_key,
        base_url=api_url,
    )

    # Resolve each message in priority order: template > task key > file.
    message_system_and_user = {}
    for system_or_user in ["system", "user"]:
        path_template = prompt_template_path(config, f"{system_or_user}.md")
        if template_exists(task, path_template, config, logger):
            message_system_and_user[system_or_user] = render_template(task, path_template, config, logger)
        elif system_or_user in task:
            message_system_and_user[system_or_user] = task[system_or_user]
        else:
            if "path-modify-event" in task and system_or_user == "user" and "user" not in task:
                logger.info(f"Using the file {task['path-modify-event']} as the user message")
                message_system_and_user[system_or_user] = file_submit_to_ai(config, logger)
            else:
                raise ValueError(
                    f'An AI task should have a key "{system_or_user}" or a template with path "{path_template}"'
                )

    # Log only the first line of the messages
    user_message = message_system_and_user["user"].splitlines()[0] if message_system_and_user["user"] else ""
    system_message = message_system_and_user["system"].splitlines()[0] if message_system_and_user["system"] else ""
    logger.debug(f"user_message: {user_message}")
    logger.debug(f"system_message: {system_message}")

    messages = [
        {
            "role": "system",
            "content": message_system_and_user["system"],
        },
        {"role": "user", "content": message_system_and_user["user"]},
    ]

    return create_chat_completion(config, client, messages, logger)
171
+
172
+
173
def process_file_with_openai_agent(config, logger):
    """Run the AI agent for the current task and store its reply.

    The response is saved on the task under "response" and also returned.
    """
    current_task = config["current-task"]
    response = process_with_openai_agent(config, logger)
    current_task["response"] = response
    return response
@@ -0,0 +1,64 @@
1
+ import re
2
+ import time
3
+ from coauthor.utils.match_utils import file_path_match
4
+
5
+
6
+ def pong(config, logger):
7
+ path = config["current-task"]["path-modify-event"]
8
+ logger.info("Running pong file processor " + path)
9
+ time.sleep(3)
10
+ with open(path, "r") as file:
11
+ file_contents = file.read()
12
+ if file_contents != "pong":
13
+ logger.info(f'Updating {path} to "pong"')
14
+ with open(path, "w") as f:
15
+ f.write("pong")
16
+
17
+
18
def regex_replace_in_file(config, logger):
    """Apply the current task's regex patterns to the file that changed.

    Each entry of task["patterns"] either replaces matches of "regex" with
    "replace", or — when "internal_regex"/"internal_replace" are present —
    runs an inner substitution only inside the spans matched by "regex".

    Args:
        config: Workflow configuration; reads "current-task" (its
            "path-modify-event" and "patterns") and stores the new text
            under task["content"].
        logger: Logger for progress reporting.

    Returns:
        True when the file content changed and was written back;
        False when the path did not match or no pattern changed anything.
    """
    task = config["current-task"]
    path = task["path-modify-event"]
    path_match = file_path_match(config, logger)
    if not path_match:
        logger.debug(f"regex_replace_in_file: no path match for {path}")
        return False
    patterns = task["patterns"]

    # Open the file and read the content
    with open(path, "r", encoding="utf-8") as file:
        content = file.read()

    # Keep the original so we can detect whether anything changed.
    original_content = content

    # Apply each regex pattern and handle internal matches/replacement if needed
    for pattern_set in patterns:
        logger.debug(f"regex_replace_in_file: {path}, patterns: {pattern_set}")
        pattern = pattern_set["regex"]

        # Check for internal modifications based on run-time conditions
        internal_regex = pattern_set.get("internal_regex")
        internal_replace = pattern_set.get("internal_replace")
        if internal_regex and internal_replace:
            # Scoped replacement: the inner re.sub runs only on the text of
            # each outer match, leaving the rest of the file untouched.
            content = re.sub(
                pattern,
                lambda match: re.sub(
                    internal_regex,
                    internal_replace,
                    match.group(0),
                ),
                content,
            )
        else:
            replace = pattern_set["replace"]
            content = re.sub(pattern, replace, content)

    if content == original_content:
        logger.debug("regex_replace_in_file: no content was changed")
        return False

    logger.info(f"Regex patterns changed file {path}, patterns: {patterns}")
    task["content"] = content
    with open(path, "w", encoding="utf-8") as file:
        file.write(content)
    # NOTE(review): presumably debounces the watcher so this write does not
    # re-trigger the same task — confirm whether 3s is required.
    time.sleep(3)
    return True
@@ -0,0 +1,45 @@
1
+ import os
2
+ import time
3
+ from coauthor.modules.ai import process_file_with_openai_agent
4
+
5
+ from coauthor.utils.workflow_utils import get_workflows_that_scan
6
+ from coauthor.modules.file_processor import regex_replace_in_file, pong
7
+ from coauthor.modules.workflow_tasks import write_file, read_file
8
+
9
# Dispatch table mapping a task "type" string (as it appears in workflow
# config) to the callable that executes that task.
task_type_functions = {
    "process_file_with_openai_agent": process_file_with_openai_agent,
    "regex_replace_in_file": regex_replace_in_file,
    "pong": pong,
    "write_file": write_file,
    "read_file": read_file,
}
16
+
17
+
18
def scan(config, logger):
    """Scan the filesystem paths of every scan-enabled workflow.

    Walks each configured directory recursively and runs the workflow's
    tasks on every file found.

    Args:
        config: Workflow configuration.
        logger: Logger for progress reporting.
    """
    workflows_that_scan = get_workflows_that_scan(config, logger)
    logger.debug(f"workflows_that_scan: {workflows_that_scan}")
    for workflow in workflows_that_scan:
        scan_directories = workflow["scan"]["filesystem"]["paths"]
        logger.info(f"Workflow {workflow['name']}: scan directories {', '.join(scan_directories)}")

        # (removed unused `wd_to_path = {}` — a leftover from the watcher code)
        for directory in scan_directories:
            for root, _, files in os.walk(directory):
                for filename in files:
                    path = os.path.join(root, filename)
                    handle_workflow_scan_file(path, workflow, config, logger)
31
+
32
+
33
def handle_workflow_scan_file(path, workflow, config, logger):
    """Run every task of *workflow* against one scanned file."""
    logger.info(f"Processing file {path}")
    for task in workflow["tasks"]:
        logger.debug(f"task: {task}")
        # Fail fast on an unknown task type before touching any state.
        if task["type"] not in task_type_functions:
            raise ValueError(f'Unsupported task_type: {task["type"]}')
        logger.debug(f"Workflow: {workflow['name']}, Task: {task['id']} → {path}")
        # Preserve a pre-existing path-modify-event; otherwise record this file.
        task.setdefault("path-modify-event", path)
        config["current-task"] = task
        config["current-workflow"] = workflow
        task_type_functions[task["type"]](config, logger)
@@ -0,0 +1,173 @@
1
+ import os
2
+ import time
3
+ from inotify_simple import INotify, flags
4
+
5
+ from coauthor.utils.workflow_utils import (
6
+ get_workflows_that_watch,
7
+ get_workflows_that_scan,
8
+ get_all_scan_directories_from_workflows,
9
+ get_all_watch_directories_from_workflows,
10
+ )
11
+ from coauthor.utils.match_utils import file_path_match, file_content_match
12
+ from coauthor.modules.ai import process_file_with_openai_agent
13
+ from coauthor.modules.file_processor import regex_replace_in_file, pong
14
+ from coauthor.modules.workflow_tasks import write_file, read_file
15
+
16
+ task_type_functions = {
17
+ "process_file_with_openai_agent": process_file_with_openai_agent,
18
+ "regex_replace_in_file": regex_replace_in_file,
19
+ "pong": pong,
20
+ "write_file": write_file,
21
+ "read_file": read_file,
22
+ }
23
+
24
+ # Initialize a dictionary to store the last modification time of each file based
25
+ # on workflow and task
26
+ last_modification_times = {}
27
+
28
+
29
+ def add_watch_recursive(inotify, directory):
30
+ """Recursively add watches on all subdirectories,
31
+ ignoring certain directories."""
32
+ wd_to_path = {}
33
+ for root, dirs, files in os.walk(directory):
34
+ # Ignore specific directories like __pycache__ # TODO .obsidian directory
35
+ dirs[:] = [d for d in dirs if d != "__pycache__"]
36
+ wd = inotify.add_watch(root, flags.CREATE | flags.MODIFY)
37
+ wd_to_path[wd] = root
38
+ return wd_to_path
39
+
40
+
41
+ def watch(config, logger):
42
+ watch_directory(config, logger)
43
+
44
+
45
+ def handle_inotify_event(event, wd_to_path, inotify, config, logger):
46
+ """Handle inotify events by accurately determining the file changed.
47
+
48
+ For MODIFY events, due to some editors' behavior of replacing files rather than directly modifying them,
49
+ this function identifies the most recently updated file in the event's directory as the file affected by the event.
50
+ """
51
+ directory = wd_to_path.get(event.wd, "")
52
+ logger.debug(
53
+ f"Inotify Event: directory={directory}, event_mask={event.mask}, flags.from_mask={flags.from_mask(event.mask)}"
54
+ )
55
+
56
+ # Find the most recently modified file in the directory, ignoring hidden files
57
+ def get_recently_modified_file(file_path_inotify, dir_path, logger):
58
+ all_files = [
59
+ os.path.join(dir_path, f)
60
+ for f in os.listdir(dir_path)
61
+ if os.path.isfile(os.path.join(dir_path, f)) and not f.startswith(".")
62
+ ]
63
+ logger.debug(f"all_files: {', '.join(all_files)}")
64
+ if file_path_inotify in all_files:
65
+ return file_path_inotify
66
+ return max(all_files, key=os.path.getmtime) if all_files else None
67
+
68
+ file_path_inotify = os.path.join(directory, event.name)
69
+ file_path = get_recently_modified_file(file_path_inotify, directory, logger)
70
+
71
+ # Check if it is a directory and add watch if new directory is created
72
+ if flags.CREATE in flags.from_mask(event.mask) and os.path.isdir(file_path_inotify):
73
+ logger.info(f"Watching new directory: {file_path_inotify}")
74
+ wd = inotify.add_watch(file_path_inotify, flags.CREATE | flags.MODIFY | flags.CLOSE_WRITE)
75
+ wd_to_path[wd] = file_path_inotify
76
+
77
+ if file_path_inotify != file_path:
78
+ logger.warning(f"file_path_inotify: {file_path_inotify} is not equal to file_path: {file_path}")
79
+ logger.debug(" this can occur depending on how editors write changes to files")
80
+ logger.debug(" For example Gedit uses a temporary file .goutputstream-G1SHX2")
81
+ file_path_selected = file_path
82
+ time.sleep(2) # allow Gedit some time to finish updating file
83
+ else:
84
+ file_path_selected = file_path_inotify
85
+
86
+ ignore_extensions = config.get("watch-ignore-file-extensions", [])
87
+ if flags.MODIFY in flags.from_mask(event.mask) and file_path_selected:
88
+ file_extension = os.path.splitext(file_path_selected)[1]
89
+ if file_extension not in ignore_extensions:
90
+ logger.info(f"MODIFY event: file_path_inotify: {file_path_selected}")
91
+ handle_workflow_file_modify_event(file_path_selected, config, logger)
92
+
93
+ # Stop the process if `stop` file is created
94
+ if os.path.basename(file_path_selected) == "stop":
95
+ logger.info("Stop file found!")
96
+ os.remove(file_path_selected)
97
+ return True
98
+
99
+ return False
100
+
101
+
102
+ def watch_directory(config, logger):
103
+ watch_directories = get_all_watch_directories_from_workflows(config, logger)
104
+ inotify = INotify()
105
+
106
+ logger.info(f"Watching directories recursively: {', '.join(watch_directories)}")
107
+
108
+ wd_to_path = {}
109
+ for directory in watch_directories:
110
+ wd_to_path.update(add_watch_recursive(inotify, directory))
111
+
112
+ try:
113
+ while True:
114
+ for event in inotify.read():
115
+ if handle_inotify_event(event, wd_to_path, inotify, config, logger):
116
+ return
117
+ except KeyboardInterrupt:
118
+ print("Stopping directory watch")
119
+ finally:
120
+ for wd in wd_to_path.keys():
121
+ inotify.rm_watch(wd)
122
+
123
+
124
+ def handle_workflow_file_modify_event(path, config, logger):
125
+ workflows_that_watch = get_workflows_that_watch(config, logger)
126
+ logger.debug(f"get_workflows_that_watch: workflows_that_watch: {workflows_that_watch}")
127
+ for workflow in workflows_that_watch:
128
+ config["current-workflow"] = workflow
129
+ logger.debug(f"workflow: {workflow}")
130
+ for task in workflow["tasks"]:
131
+ config["current-task"] = task
132
+ task["path-modify-event"] = path
133
+ path_match = file_path_match(config, logger)
134
+ if "content_patterns" in workflow:
135
+ content_match = file_content_match(config, logger)
136
+ logger.debug(f'Workflow has no "content_patterns", so "content_match" is True')
137
+ else:
138
+ content_match = True
139
+ if not path_match or not content_match:
140
+ logger.debug(f"No path or content match on {path}")
141
+ continue
142
+ logger.info(f"Path and content match on {path}")
143
+ if task["type"] in task_type_functions:
144
+ logger.info(f"Workflow: {workflow['name']}, Task: {task['id']} → {path}")
145
+ if not ignore_rapid_modify_event(path, config, logger):
146
+ task_type_functions[task["type"]](config, logger)
147
+ else:
148
+ raise ValueError(f'Unsupported task_type: {task["type"]}')
149
+
150
+
151
+ # Some tools / IDE depending on configuraton / extensions / plugins might cause
152
+ # multiple modify events. To distinquishd from technical save from user save
153
+ # we can set a time limit and ignore the same modify event
154
+ def ignore_rapid_modify_event(path, config, logger):
155
+ current_time = time.time()
156
+ workflow = config["current-workflow"]
157
+ task = config["current-task"]
158
+ # Determine unique key based on file path, workflow and task id
159
+ key = (path, workflow["name"], task["id"])
160
+ last_time = last_modification_times.get(key, 0)
161
+
162
+ modify_event_limit = 3
163
+ if "modify_event_limit" in task:
164
+ modify_event_limit = task["modify_event_limit"]
165
+
166
+ # If the time since the last modification event is less than
167
+ # modify_event_limit, ignore this event
168
+ if current_time - last_time < modify_event_limit:
169
+ logger.info(f" Ignoring rapid MODIFY event for (modify_event_limit: {modify_event_limit}).")
170
+ return True
171
+ last_modification_times[key] = current_time
172
+ logger.debug(f" NOT ignoring rapid MODIFY event for (modify_event_limit: {modify_event_limit}).")
173
+ return False
@@ -0,0 +1,32 @@
1
+ # from coauthor.modules.file_watcher import watch
2
+ # from coauthor.modules.file_scanner import scan
3
+ # from coauthor.modules.ai import process_file_with_openai_agent
4
+ # from coauthor.modules.file_processor import pong, regex_replace_in_file
5
+
6
+ # from coauthor.modules.workflow_tasks import read_file, write_file
7
+ from coauthor.modules.file_scanner import scan
8
+ from coauthor.modules.file_watcher import watch
9
+ from coauthor.utils.workflow_utils import (
10
+ get_all_scan_directories_from_workflows,
11
+ get_all_watch_directories_from_workflows,
12
+ )
13
+
14
+
15
def initialize_workflows(config, logger, trigger_scan=False):
    """Run scan and/or watch workflows according to config and CLI args.

    Args:
        config: Parsed configuration. Must contain "workflows" to do
            anything; may carry the parsed CLI namespace under "args".
        logger: Logger for progress reporting.
        trigger_scan: Force scan mode even when --scan was not passed.
    """
    # Idiomatic membership test ("x not in y" instead of "not x in y").
    if "workflows" not in config:
        logger.warning("No workflows in config, nothing to do")
        return
    args = config.get("args", None)
    if (args and args.scan) or trigger_scan:
        logger.info("Scan mode enabled with --scan")
        scan_directories = get_all_scan_directories_from_workflows(config, logger)
        logger.info(f"scan_directories: {', '.join(scan_directories)}")
        if len(scan_directories) > 0:
            scan(config, logger)
        else:
            logger.debug("No scan directories!")
    if args and args.watch:
        watch_directories = get_all_watch_directories_from_workflows(config, logger)
        logger.info("Watch mode enabled with --watch")
        if len(watch_directories) > 0:
            watch(config, logger)
@@ -0,0 +1,32 @@
1
+ from coauthor.utils.jinja import render_content
2
+ import os
3
+
4
+
5
def read_file(config, logger):
    """Read the current modify-event file into task["content"]."""
    logger.debug(f"config: {config}")
    current_task = config["current-task"]
    source_path = current_task["path-modify-event"]
    logger.info(f"Reading file from {source_path}")
    with open(source_path, "r", encoding="utf-8") as handle:
        current_task["content"] = handle.read()
+
14
+
15
def write_file(config, logger):
    """Render and write the current task's content to its target path.

    The target path is task["path"] (rendered as a template) when present,
    otherwise the path of the triggering modify event. Missing parent
    directories are created.

    Args:
        config: Workflow configuration; reads "current-task" ("path" or
            "path-modify-event", and "content").
        logger: Logger for progress reporting.
    """
    logger.debug(f"config: {config}")
    task = config["current-task"]
    if "path" in task:
        path = render_content(task, task["path"], config, logger)
    else:
        path = task["path-modify-event"]

    directory = os.path.dirname(path)
    # A bare filename has an empty directory component; os.makedirs("")
    # would raise, so only create a real (non-empty) missing directory.
    if directory and not os.path.exists(directory):
        os.makedirs(directory)
        logger.warning(f"Directory {directory} did not exist, created it.")

    logger.info(f"Writing to file at {path}")

    content = render_content(task, task["content"], config, logger)
    with open(path, "w", encoding="utf-8") as file:
        file.write(content)