mallama 0.1.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mallama-0.1.2/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Masoud Gholypour
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,5 @@
1
+ include README.md
2
+ include LICENSE
3
+ include requirements.txt
4
+ recursive-include mallama/templates *
5
+ recursive-include mallama/static *
mallama-0.1.2/PKG-INFO ADDED
@@ -0,0 +1,98 @@
1
+ Metadata-Version: 2.4
2
+ Name: mallama
3
+ Version: 0.1.2
4
+ Summary: Browser UI for Ollama • Local LLM Interface • Web Chat Client for Local AI Models
5
+ Home-page: https://github.com/mesut2ooo/mallama
6
+ Author: Masoud Gholypour
7
+ Author-email: Masoud Gholypour <masoudgholypour2000@gmail.com>
8
+ License: MIT
9
+ Project-URL: Homepage, https://github.com/mesut2ooo/mallama
10
+ Project-URL: Repository, https://github.com/mesut2ooo/mallama.git
11
+ Project-URL: Issues, https://github.com/mesut2ooo/mallama/issues
12
+ Classifier: Development Status :: 3 - Alpha
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: License :: OSI Approved :: MIT License
15
+ Classifier: Programming Language :: Python :: 3
16
+ Classifier: Programming Language :: Python :: 3.8
17
+ Classifier: Programming Language :: Python :: 3.9
18
+ Classifier: Programming Language :: Python :: 3.10
19
+ Classifier: Programming Language :: Python :: 3.11
20
+ Requires-Python: >=3.8
21
+ Description-Content-Type: text/markdown
22
+ License-File: LICENSE
23
+ Requires-Dist: flask>=2.0.0
24
+ Requires-Dist: requests>=2.28.0
25
+ Requires-Dist: werkzeug>=2.0.0
26
+ Dynamic: author
27
+ Dynamic: home-page
28
+ Dynamic: license-file
29
+ Dynamic: requires-python
30
+
31
+ # Ollama Web UI
32
+
33
+ A beautiful web interface for Ollama with conversation management and markdown support.
34
+
35
+ ## Features
36
+
37
+ - 💬 Chat with Ollama models
38
+ - 📝 Markdown support with syntax highlighting
39
+ - 💾 Save and manage conversations
40
+ - ⚙️ Adjustable parameters (temperature, top-p, max tokens)
41
+ - 📎 File upload support
42
+ - 🎨 Beautiful glass-morphism UI
43
+ - ⌨️ Keyboard shortcuts (Ctrl+C to stop generation)
44
+
45
+ ## Installation
46
+
47
+ ### Via pip
48
+ ```bash
49
+ pip install mallama
50
+ mallama --host 0.0.0.0 --port 5000
51
+
52
+ Via AUR (Arch Linux)
53
+ bash
54
+
55
+ yay -S mallama
56
+ # or
57
+ paru -S mallama
58
+
59
+ # Run as a service
60
+ systemctl --user enable mallama
61
+ systemctl --user start mallama
62
+
63
+ From source
64
+ bash
65
+
66
+ git clone https://github.com/mesut2ooo/mallama
67
+ cd mallama
68
+ pip install -e .
69
+ mallama
70
+
71
+ Requirements
72
+
73
+ Python 3.8+
74
+
75
+ Ollama installed and running locally (http://localhost:11434)
76
+
77
+ Usage
78
+
79
+ Make sure Ollama is running with at least one model pulled
80
+
81
+ Start the web UI: mallama
82
+
83
+ Open http://localhost:5000 in your browser
84
+
85
+ Select a model and start chatting!
86
+
87
+ Configuration
88
+
89
+ The application stores conversations and uploads in ~/.mallama/
90
+ License
91
+
92
+ MIT
93
+ text
94
+
95
+
96
+ ### **tests/__init__.py**
97
+ ```python
98
+ # Test package
@@ -0,0 +1,68 @@
1
+ # Ollama Web UI
2
+
3
+ A beautiful web interface for Ollama with conversation management and markdown support.
4
+
5
+ ## Features
6
+
7
+ - 💬 Chat with Ollama models
8
+ - 📝 Markdown support with syntax highlighting
9
+ - 💾 Save and manage conversations
10
+ - ⚙️ Adjustable parameters (temperature, top-p, max tokens)
11
+ - 📎 File upload support
12
+ - 🎨 Beautiful glass-morphism UI
13
+ - ⌨️ Keyboard shortcuts (Ctrl+C to stop generation)
14
+
15
+ ## Installation
16
+
17
+ ### Via pip
18
+ ```bash
19
+ pip install mallama
20
+ mallama --host 0.0.0.0 --port 5000
21
+
22
+ Via AUR (Arch Linux)
23
+ bash
24
+
25
+ yay -S mallama
26
+ # or
27
+ paru -S mallama
28
+
29
+ # Run as a service
30
+ systemctl --user enable mallama
31
+ systemctl --user start mallama
32
+
33
+ From source
34
+ bash
35
+
36
+ git clone https://github.com/mesut2ooo/mallama
37
+ cd mallama
38
+ pip install -e .
39
+ mallama
40
+
41
+ Requirements
42
+
43
+ Python 3.8+
44
+
45
+ Ollama installed and running locally (http://localhost:11434)
46
+
47
+ Usage
48
+
49
+ Make sure Ollama is running with at least one model pulled
50
+
51
+ Start the web UI: mallama
52
+
53
+ Open http://localhost:5000 in your browser
54
+
55
+ Select a model and start chatting!
56
+
57
+ Configuration
58
+
59
+ The application stores conversations and uploads in ~/.mallama/
60
+ License
61
+
62
+ MIT
63
+ text
64
+
65
+
66
+ ### **tests/__init__.py**
67
+ ```python
68
+ # Test package
@@ -0,0 +1,2 @@
1
+ """Ollama Web UI - A beautiful web interface for Ollama"""
2
+ __version__ = "0.1.2"
@@ -0,0 +1,46 @@
1
#!/usr/bin/env python3
"""
Main entry point for the mallama package.
"""
import os
import sys
import argparse
from .app import app


def main():
    """Parse CLI options, prepare the user data directories, and run the server."""
    parser = argparse.ArgumentParser(description="Ollama Web UI Server")
    parser.add_argument(
        "--host",
        default="0.0.0.0",
        help="Host to bind to (default: 0.0.0.0)",
    )
    parser.add_argument(
        "--port",
        type=int,
        default=5000,
        help="Port to bind to (default: 5000)",
    )
    parser.add_argument(
        "--debug",
        action="store_true",
        help="Run in debug mode",
    )
    args = parser.parse_args()

    print(f"Starting Ollama Web UI on http://{args.host}:{args.port}")
    print("Press Ctrl+C to stop")

    # Per-user storage lives under ~/.mallama/ so the install dir stays clean.
    conversations_dir = os.path.expanduser("~/.mallama/conversations")
    uploads_dir = os.path.expanduser("~/.mallama/uploads")
    for directory in (conversations_dir, uploads_dir):
        os.makedirs(directory, exist_ok=True)

    # Point the Flask app at the user directory instead of any packaged default.
    app.config['UPLOAD_FOLDER'] = uploads_dir
    app.config['CONVERSATIONS_FOLDER'] = conversations_dir

    app.run(debug=args.debug, host=args.host, port=args.port)


if __name__ == "__main__":
    main()
@@ -0,0 +1,203 @@
1
+ # app.py
2
+ import os
3
+ import json
4
+ import requests
5
+ import uuid
6
+ from flask import Flask, render_template, request, Response, jsonify, abort
7
+ from werkzeug.utils import secure_filename
8
+ from datetime import datetime
9
+
10
# Get the directory where this file is located
# (so packaged templates/static resolve regardless of the CWD at launch).
BASE_DIR = os.path.dirname(os.path.abspath(__file__))

# Flask app rooted at the templates/ and static/ dirs bundled in the package.
app = Flask(__name__,
            template_folder=os.path.join(BASE_DIR, 'templates'),
            static_folder=os.path.join(BASE_DIR, 'static'))

# Use user directory for data storage
# (NOTE(review): __main__.main() re-assigns these to the same paths at startup).
app.config['UPLOAD_FOLDER'] = os.path.expanduser("~/.mallama/uploads")
app.config['CONVERSATIONS_FOLDER'] = os.path.expanduser("~/.mallama/conversations")
app.config['MAX_CONTENT_LENGTH'] = 50 * 1024 * 1024  # 50MB limit

# Ensure directories exist
os.makedirs(app.config['UPLOAD_FOLDER'], exist_ok=True)
os.makedirs(app.config['CONVERSATIONS_FOLDER'], exist_ok=True)

# Base URL of the local Ollama daemon this UI proxies.
OLLAMA_BASE = "http://localhost:11434"
27
+
28
# Helper: flatten a chat transcript into a single text prompt
def build_prompt(messages, system_prompt=""):
    """Render *messages* (dicts with "role" and "content") into one prompt string.

    An optional system prompt is emitted first; the result always ends with
    a trailing "Assistant:" cue so the model continues as the assistant.
    """
    role_labels = {"user": "User", "assistant": "Assistant"}
    parts = []
    if system_prompt:
        parts.append(f"System: {system_prompt}\n")
    for message in messages:
        role = message["role"]
        # Unknown roles fall through with their raw role name as the label.
        label = role_labels.get(role, role)
        parts.append(f"{label}: {message['content']}\n")
    parts.append("Assistant:")
    return "".join(parts)
44
+
45
# Route: serve UI
@app.route('/')
def index():
    """Serve the single-page chat UI (templates/index.html)."""
    return render_template('index.html')
49
+
50
# Route: get installed models
@app.route('/models', methods=['GET'])
def get_models():
    """Return the names of models installed in the local Ollama instance.

    Responds with a JSON list of model name strings. Degrades to an empty
    list when Ollama is unreachable, answers with an error, or returns an
    unexpected payload, so the UI can still load without a daemon.
    """
    try:
        # Bounded timeout so a hung Ollama daemon cannot stall this request
        # forever (the original call had none).
        resp = requests.get(f"{OLLAMA_BASE}/api/tags", timeout=10)
        if resp.status_code == 200:
            models = resp.json().get('models', [])
            return jsonify([m['name'] for m in models])
        return jsonify([])
    except (requests.RequestException, ValueError, KeyError):
        # Narrowed from a bare `except:` which also swallowed SystemExit /
        # KeyboardInterrupt. Covers connection failures, invalid JSON, and
        # entries missing a 'name' key.
        return jsonify([])
62
+
63
# Route: streaming chat
@app.route('/chat', methods=['POST'])
def chat():
    """Stream a model completion as server-sent events.

    Expects JSON with "model", "messages", and optional "system",
    "temperature", "top_p", "max_tokens". Proxies Ollama's streaming
    /api/generate endpoint, forwarding each token as a `data:` event and
    terminating the stream with `data: [DONE]`.

    Returns 400 when "model" is missing; transport errors are reported
    in-stream as `data: ERROR: ...` events.
    """
    data = request.json
    model = data.get('model')
    messages = data.get('messages', [])
    system = data.get('system', '')
    temperature = data.get('temperature', 0.7)
    top_p = data.get('top_p', 0.9)
    max_tokens = data.get('max_tokens', 2048)

    if not model:
        return jsonify({'error': 'Model not specified'}), 400

    # Build prompt from messages
    prompt = build_prompt(messages, system)

    # Prepare payload for Ollama generate
    payload = {
        "model": model,
        "prompt": prompt,
        "stream": True,
        "options": {
            "temperature": temperature,
            "top_p": top_p,
            "num_predict": max_tokens
        }
    }

    def generate():
        try:
            # Connect timeout only: the read side must stay open for the
            # (arbitrarily long) token stream, so no read timeout is set.
            with requests.post(f"{OLLAMA_BASE}/api/generate", json=payload,
                               stream=True, timeout=(10, None)) as r:
                if r.status_code != 200:
                    yield f"data: ERROR: {r.status_code}\n\n"
                    return
                for line in r.iter_lines():
                    if not line:
                        continue
                    try:
                        chunk = json.loads(line)
                    except json.JSONDecodeError:
                        # Skip malformed lines rather than aborting the
                        # stream. (The original bare `except:` around the
                        # yields also swallowed GeneratorExit on client
                        # disconnect; the narrow scope here fixes that.)
                        continue
                    if 'response' in chunk:
                        yield f"data: {json.dumps({'token': chunk['response']})}\n\n"
                    if chunk.get('done', False):
                        yield f"data: [DONE]\n\n"
                        return
        except Exception as e:
            # Best-effort boundary: surface the failure in-stream instead
            # of letting the SSE response die silently.
            yield f"data: ERROR: {str(e)}\n\n"

    return Response(generate(), mimetype='text/event-stream')
113
+
114
# Route: stop generation (client-side abort only)
@app.route('/stop', methods=['POST'])
def stop():
    # The browser aborts its own fetch to stop streaming; this endpoint
    # only acknowledges the request and changes no server-side state.
    return jsonify({'status': 'stopped'})
118
+
119
# Route: save conversation
@app.route('/save', methods=['POST'])
def save_conversation():
    """Persist the posted conversation JSON to the conversations folder.

    If the payload carries no "name", one is derived from the first user
    message (truncated to 30 chars, newlines flattened) or defaults to
    "New Chat". Replies with the generated filename.
    """
    data = request.json
    if not data:
        return jsonify({'error': 'No data'}), 400

    if 'name' not in data:
        derived = 'New Chat'
        for msg in data.get('messages', []):
            if msg.get('role') == 'user':
                content = msg.get('content', '')
                if len(content) > 30:
                    derived = content[:30] + '...'
                else:
                    derived = content
                derived = derived.replace('\n', ' ').strip() or 'New Chat'
                break
        data['name'] = derived

    # Timestamp plus random suffix keeps filenames unique and sortable.
    stamp = datetime.now().strftime('%Y%m%d_%H%M%S')
    filename = f"conv_{stamp}_{uuid.uuid4().hex[:8]}.json"
    filepath = os.path.join(app.config['CONVERSATIONS_FOLDER'], filename)
    with open(filepath, 'w') as f:
        json.dump(data, f, indent=2)
    return jsonify({'filename': filename})
142
+
143
# Route: load conversation
@app.route('/load', methods=['POST'])
def load_conversation():
    """Load a previously saved conversation by filename.

    The client-supplied filename is reduced to its basename before being
    joined to the conversations folder, so a crafted "../" path cannot
    read files outside the storage directory. Returns 400 when no
    filename is given and 404 when the file does not exist.
    """
    data = request.json
    filename = data.get('filename')
    if not filename:
        return jsonify({'error': 'Filename missing'}), 400
    # Security fix: strip directory components from untrusted input to
    # prevent path traversal (e.g. filename = "../../etc/passwd").
    filename = os.path.basename(filename)
    filepath = os.path.join(app.config['CONVERSATIONS_FOLDER'], filename)
    if not os.path.exists(filepath):
        return jsonify({'error': 'File not found'}), 404
    with open(filepath, 'r') as f:
        conversation = json.load(f)
    return jsonify(conversation)
156
+
157
# Route: list saved conversations
@app.route('/conversations', methods=['GET'])
def list_conversations():
    """Return saved conversation filenames, newest first.

    Filenames embed a timestamp, so reverse lexicographic order is
    reverse chronological order.
    """
    folder = app.config['CONVERSATIONS_FOLDER']
    names = sorted(
        (name for name in os.listdir(folder) if name.endswith('.json')),
        reverse=True,
    )
    return jsonify(names)
163
+
164
# Route: upload file
@app.route('/upload', methods=['POST'])
def upload_file():
    """Store an uploaded file under a collision-proof name.

    The saved name is "<uuid hex>_<sanitized original name>"; the response
    reports both the stored name and the original filename so the UI can
    map between them. Returns 400 when no file part or no file is selected.
    """
    if 'file' not in request.files:
        return jsonify({'error': 'No file part'}), 400
    file = request.files['file']
    if file.filename == '':
        return jsonify({'error': 'No selected file'}), 400
    # secure_filename strips path separators and unsafe characters.
    filename = secure_filename(file.filename)
    # Bug fix: the stored name previously embedded the literal text
    # "(unknown)" instead of the sanitized original filename, so every
    # upload lost its name and extension.
    unique_name = f"{uuid.uuid4().hex}_{filename}"
    filepath = os.path.join(app.config['UPLOAD_FOLDER'], unique_name)
    file.save(filepath)
    return jsonify({'filename': unique_name, 'original': filename})
178
+
179
# Route: delete a single conversation
@app.route('/delete', methods=['POST'])
def delete_conversation():
    """Delete one saved conversation by filename.

    The untrusted filename is reduced to its basename so "../" sequences
    cannot delete files outside the conversations folder. Returns 400
    when no filename is given and 404 when the file does not exist.
    """
    data = request.json
    filename = data.get('filename')
    if not filename:
        return jsonify({'error': 'Filename missing'}), 400

    # Security fix: refuse directory components in client-supplied names;
    # previously "../<anything>" could remove arbitrary files.
    filename = os.path.basename(filename)
    filepath = os.path.join(app.config['CONVERSATIONS_FOLDER'], filename)
    if os.path.exists(filepath):
        os.remove(filepath)
        return jsonify({'status': 'deleted'})
    return jsonify({'error': 'File not found'}), 404
192
+
193
# Route: delete all conversations
@app.route('/delete-all', methods=['POST'])
def delete_all_conversations():
    """Remove every saved conversation file (*.json) from storage.

    Returns a 500 with the error text if any filesystem operation fails.
    """
    folder = app.config['CONVERSATIONS_FOLDER']
    try:
        for name in os.listdir(folder):
            if name.endswith('.json'):
                os.remove(os.path.join(folder, name))
    except Exception as e:
        return jsonify({'error': str(e)}), 500
    return jsonify({'status': 'all deleted'})
@@ -0,0 +1,98 @@
1
+ Metadata-Version: 2.4
2
+ Name: mallama
3
+ Version: 0.1.2
4
+ Summary: Browser UI for Ollama • Local LLM Interface • Web Chat Client for Local AI Models
5
+ Home-page: https://github.com/mesut2ooo/mallama
6
+ Author: Masoud Gholypour
7
+ Author-email: Masoud Gholypour <masoudgholypour2000@gmail.com>
8
+ License: MIT
9
+ Project-URL: Homepage, https://github.com/mesut2ooo/mallama
10
+ Project-URL: Repository, https://github.com/mesut2ooo/mallama.git
11
+ Project-URL: Issues, https://github.com/mesut2ooo/mallama/issues
12
+ Classifier: Development Status :: 3 - Alpha
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: License :: OSI Approved :: MIT License
15
+ Classifier: Programming Language :: Python :: 3
16
+ Classifier: Programming Language :: Python :: 3.8
17
+ Classifier: Programming Language :: Python :: 3.9
18
+ Classifier: Programming Language :: Python :: 3.10
19
+ Classifier: Programming Language :: Python :: 3.11
20
+ Requires-Python: >=3.8
21
+ Description-Content-Type: text/markdown
22
+ License-File: LICENSE
23
+ Requires-Dist: flask>=2.0.0
24
+ Requires-Dist: requests>=2.28.0
25
+ Requires-Dist: werkzeug>=2.0.0
26
+ Dynamic: author
27
+ Dynamic: home-page
28
+ Dynamic: license-file
29
+ Dynamic: requires-python
30
+
31
+ # Ollama Web UI
32
+
33
+ A beautiful web interface for Ollama with conversation management and markdown support.
34
+
35
+ ## Features
36
+
37
+ - 💬 Chat with Ollama models
38
+ - 📝 Markdown support with syntax highlighting
39
+ - 💾 Save and manage conversations
40
+ - ⚙️ Adjustable parameters (temperature, top-p, max tokens)
41
+ - 📎 File upload support
42
+ - 🎨 Beautiful glass-morphism UI
43
+ - ⌨️ Keyboard shortcuts (Ctrl+C to stop generation)
44
+
45
+ ## Installation
46
+
47
+ ### Via pip
48
+ ```bash
49
+ pip install mallama
50
+ mallama --host 0.0.0.0 --port 5000
51
+
52
+ Via AUR (Arch Linux)
53
+ bash
54
+
55
+ yay -S mallama
56
+ # or
57
+ paru -S mallama
58
+
59
+ # Run as a service
60
+ systemctl --user enable mallama
61
+ systemctl --user start mallama
62
+
63
+ From source
64
+ bash
65
+
66
+ git clone https://github.com/mesut2ooo/mallama
67
+ cd mallama
68
+ pip install -e .
69
+ mallama
70
+
71
+ Requirements
72
+
73
+ Python 3.8+
74
+
75
+ Ollama installed and running locally (http://localhost:11434)
76
+
77
+ Usage
78
+
79
+ Make sure Ollama is running with at least one model pulled
80
+
81
+ Start the web UI: mallama
82
+
83
+ Open http://localhost:5000 in your browser
84
+
85
+ Select a model and start chatting!
86
+
87
+ Configuration
88
+
89
+ The application stores conversations and uploads in ~/.mallama/
90
+ License
91
+
92
+ MIT
93
+ text
94
+
95
+
96
+ ### **tests/__init__.py**
97
+ ```python
98
+ # Test package
@@ -0,0 +1,16 @@
1
+ LICENSE
2
+ MANIFEST.in
3
+ README.md
4
+ pyproject.toml
5
+ requirements.txt
6
+ setup.py
7
+ mallama/__init__.py
8
+ mallama/__main__.py
9
+ mallama/app.py
10
+ mallama.egg-info/PKG-INFO
11
+ mallama.egg-info/SOURCES.txt
12
+ mallama.egg-info/dependency_links.txt
13
+ mallama.egg-info/entry_points.txt
14
+ mallama.egg-info/requires.txt
15
+ mallama.egg-info/top_level.txt
16
+ tests/__init__.py
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ mallama = mallama.__main__:main
@@ -0,0 +1,3 @@
1
+ flask>=2.0.0
2
+ requests>=2.28.0
3
+ werkzeug>=2.0.0
@@ -0,0 +1,2 @@
1
+ mallama
2
+ tests
@@ -0,0 +1,37 @@
1
+ [build-system]
2
+ requires = ["setuptools>=61.0", "wheel"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "mallama"
7
+ version = "0.1.2"
8
+ description = "Browser UI for Ollama • Local LLM Interface • Web Chat Client for Local AI Models"
9
+ readme = "README.md"
10
+ authors = [
11
+ {name = "Masoud Gholypour", email = "masoudgholypour2000@gmail.com"}
12
+ ]
13
+ license = {text = "MIT"}
14
+ classifiers = [
15
+ "Development Status :: 3 - Alpha",
16
+ "Intended Audience :: Developers",
17
+ "License :: OSI Approved :: MIT License",
18
+ "Programming Language :: Python :: 3",
19
+ "Programming Language :: Python :: 3.8",
20
+ "Programming Language :: Python :: 3.9",
21
+ "Programming Language :: Python :: 3.10",
22
+ "Programming Language :: Python :: 3.11",
23
+ ]
24
+ dependencies = [
25
+ "flask>=2.0.0",
26
+ "requests>=2.28.0",
27
+ "werkzeug>=2.0.0",
28
+ ]
29
+ requires-python = ">=3.8"
30
+
31
+ [project.urls]
32
+ Homepage = "https://github.com/mesut2ooo/mallama"
33
+ Repository = "https://github.com/mesut2ooo/mallama.git"
34
+ Issues = "https://github.com/mesut2ooo/mallama/issues"
35
+
36
+ [project.scripts]
37
+ mallama = "mallama.__main__:main"
@@ -0,0 +1,4 @@
1
+ # requirements.txt
2
+ Flask==2.3.3
3
+ requests==2.31.0
4
+ Werkzeug==2.3.7
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
mallama-0.1.2/setup.py ADDED
@@ -0,0 +1,30 @@
1
from setuptools import setup, find_packages

# Packaging script for mallama. Metadata here should stay in sync with
# pyproject.toml (which modern builds read first).
setup(
    name="mallama",
    version="0.1.2",
    packages=find_packages(),
    include_package_data=True,
    install_requires=[
        "flask>=2.0.0",
        "requests>=2.28.0",
        "werkzeug>=2.0.0",
    ],
    entry_points={
        "console_scripts": [
            # Bug fix: the shipped package is "mallama" (see SOURCES.txt /
            # top_level.txt); the previous "ollama_webui.__main__:main"
            # target pointed at a module that is not installed, so the
            # console command failed on import.
            "mallama=mallama.__main__:main",
        ],
    },
    author="Masoud Gholypour",
    author_email="masoudgholypour2000@gmail.com",
    description="Browser UI for Ollama • Local LLM Interface • Web Chat Client for Local AI Models",
    # Explicit encoding so the build does not depend on the locale default.
    long_description=open("README.md", encoding="utf-8").read(),
    long_description_content_type="text/markdown",
    url="https://github.com/mesut2ooo/mallama",
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires=">=3.8",
)
File without changes