lasmnemonicsid-0.0.3rc0-py3-none-any.whl → lasmnemonicsid-0.0.5-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in the public registry.
@@ -0,0 +1,134 @@
+
+ import LASMnemonicsID.utils.mnemonics as mnm
+ from LASMnemonicsID.utils.mnemonics import (
+     gamma_names,
+     sp_names,
+     caliper_names,
+     deepres_names,
+     rxo_names,
+     density_names,
+     density_correction_names,
+     neutron_names,
+     dtc_names,
+     dts_names,
+     pe_names,
+ )
+ import os
+ import pandas as pd
+ from pathlib import Path
+
+ # Import helper functions from LAS module
+ from ..LAS.LAS import create_mnemonic_dict, _standardize_all_curves
+
+
+ def parseASCII(input_path, verbose=True, preferred_names=None, depth_col="DEPTH", delimiter=","):
+     """
+     Parse ASCII/CSV/TXT well log file or all in directory → DataFrame or {filename: df}.
+
+     Args:
+         input_path (str/Path): ASCII/CSV/TXT file or directory
+         verbose (bool): Print info
+         preferred_names (dict, optional): Mapping of curve types to preferred column names.
+             Example: {"deepres": "RT", "gamma": "GR"}
+             If not provided, defaults to standard petrophysical names.
+         depth_col (str): Name of depth column (default: "DEPTH")
+         delimiter (str): CSV delimiter (default: ",")
+
+     Returns:
+         DataFrame (single) or dict {filename: df} (multiple/dir)
+     """
+     input_path = Path(input_path)
+
+     # Define default standard names
+     std_names = {
+         "gamma": "GR",
+         "sp": "SP",
+         "caliper": "CALI",
+         "deepres": "RT",
+         "rxo": "RXO",
+         "density": "RHOB",
+         "density_correction": "DRHO",
+         "neutron": "NPHI",
+         "dtc": "DT",
+         "dts": "DTS",
+         "pe": "PEF"
+     }
+
+     # Update with user preferences if provided
+     if preferred_names:
+         std_names.update(preferred_names)
+
+     # All supported ASCII extensions (case-insensitive)
+     ascii_extensions = ['.csv', '.txt', '.asc', '.dat', '.ascii']
+
+     # Case 1: Single File
+     if input_path.is_file() and input_path.suffix.lower() in ascii_extensions:
+         df = _read_single_ascii(input_path, verbose, std_names, depth_col, delimiter)
+         return df if df is not None else None
+
+     # Case 2: Directory (Recursive) - CASE-INSENSITIVE
+     ascii_files = [f for f in input_path.rglob("*") if f.suffix.lower() in ascii_extensions]
+     if not ascii_files:
+         if verbose:
+             print(f"No ASCII/CSV files found in {input_path}")
+         return {}
+
+     ascii_dict = {}
+     for ascii_file in ascii_files:
+         df = _read_single_ascii(ascii_file, verbose, std_names, depth_col, delimiter)
+         if df is not None:
+             filename = ascii_file.name
+             ascii_dict[filename] = df
+
+     # Return single DF if only 1 file found, else dict
+     if len(ascii_dict) == 1:
+         return next(iter(ascii_dict.values()))
+
+     return ascii_dict
+
+
+ def _read_single_ascii(ascii_file_path, verbose, std_names, depth_col, delimiter):
+     """Read single ASCII/CSV file to DataFrame and standardize ALL curves."""
+     try:
+         # Try reading the file
+         df = pd.read_csv(ascii_file_path, delimiter=delimiter)
+
+         if df.empty:
+             if verbose:
+                 print(f"✗ Empty DataFrame: {ascii_file_path.name}")
+             return None
+
+         # Handle depth column (case-insensitive)
+         depth_cols = [col for col in df.columns if col.upper() == depth_col.upper()]
+         if depth_cols:
+             df.set_index(depth_cols[0], inplace=True)
+         else:
+             # Use first column as depth
+             df.set_index(df.columns[0], inplace=True)
+
+         # Ensure index is float
+         df.index = df.index.astype(float)
+         df.index.name = "DEPTH"
+
+         # Create fake las_data object for standardization
+         class FakeLASData:
+             pass
+
+         fake_las = FakeLASData()
+
+         # Standardize ALL curves (GR, RHOB, NPHI, etc.)
+         _standardize_all_curves(fake_las, df, std_names)
+
+         if verbose:
+             print(f"✓ {ascii_file_path.name}")
+         return df
+
+     except Exception as e:
+         if verbose:
+             print(f"✗ Error in {ascii_file_path.name}: {type(e).__name__}: {e}")
+         return None
+
+
+ def _get_well_name(ascii_file_path):
+     """Extract well name from ASCII file (use filename)"""
+     return ascii_file_path.stem
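*Editorial note (not part of the package diff): a minimal usage sketch of the `parseASCII` entry point added above. File and directory names are hypothetical; the behaviour shown — depth column matched case-insensitively, index always renamed to `DEPTH`, a bare DataFrame for a single readable file and a `{filename: DataFrame}` dict otherwise — follows the code in this hunk.*

```python
from LASMnemonicsID import parseASCII

# Single tab-delimited export; a column named "md"/"MD" satisfies depth_col="MD".
df = parseASCII("well_A.txt", delimiter="\t", depth_col="MD")
print(df.index.name)  # "DEPTH" - the index is renamed after parsing

# A directory is searched recursively for .csv/.txt/.asc/.dat/.ascii files.
logs = parseASCII("logs/", verbose=False)
if isinstance(logs, dict):   # several (or zero) readable files -> {filename: DataFrame}
    for name, frame in logs.items():
        print(name, frame.shape)
else:                        # exactly one readable file -> a bare DataFrame
    print(logs.head())
```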
@@ -0,0 +1,2 @@
+
+ from .ASCII import *
@@ -0,0 +1,150 @@
+
+ import LASMnemonicsID.utils.mnemonics as mnm
+ from LASMnemonicsID.utils.mnemonics import (
+     gamma_names,
+     sp_names,
+     caliper_names,
+     deepres_names,
+     rxo_names,
+     density_names,
+     density_correction_names,
+     neutron_names,
+     dtc_names,
+     dts_names,
+     pe_names,
+ )
+ import os
+ import pandas as pd
+ import dlisio
+ from pathlib import Path
+
+ # Import helper functions from LAS module
+ from ..LAS.LAS import create_mnemonic_dict, _standardize_all_curves
+
+
+ def parseDLIS(input_path, verbose=True, preferred_names=None):
+     """
+     Parse DLIS file or all in directory → DataFrame or {filename: df}.
+
+     Args:
+         input_path (str/Path): DLIS file or directory
+         verbose (bool): Print info
+         preferred_names (dict, optional): Mapping of curve types to preferred column names.
+             Example: {"deepres": "RT", "gamma": "GR"}
+             If not provided, defaults to standard petrophysical names.
+
+     Returns:
+         DataFrame (single) or dict {filename: df} (multiple/dir)
+     """
+     input_path = Path(input_path)
+
+     # Define default standard names
+     std_names = {
+         "gamma": "GR",
+         "sp": "SP",
+         "caliper": "CALI",
+         "deepres": "RT",
+         "rxo": "RXO",
+         "density": "RHOB",
+         "density_correction": "DRHO",
+         "neutron": "NPHI",
+         "dtc": "DT",
+         "dts": "DTS",
+         "pe": "PEF"
+     }
+
+     # Update with user preferences if provided
+     if preferred_names:
+         std_names.update(preferred_names)
+
+     # Case 1: Single File (case-insensitive)
+     if input_path.is_file() and input_path.suffix.lower() == '.dlis':
+         df = _read_single_dlis(input_path, verbose, std_names)
+         return df if df is not None else None
+
+     # Case 2: Directory (Recursive) - CASE-INSENSITIVE
+     dlis_files = [f for f in input_path.rglob("*") if f.suffix.lower() == '.dlis']
+     if not dlis_files:
+         if verbose:
+             print(f"No DLIS files found in {input_path}")
+         return {}
+
+     dlis_dict = {}
+     for dlis_file in dlis_files:
+         df = _read_single_dlis(dlis_file, verbose, std_names)
+         if df is not None:
+             filename = dlis_file.name
+             dlis_dict[filename] = df
+
+     # Return single DF if only 1 file found, else dict
+     if len(dlis_dict) == 1:
+         return next(iter(dlis_dict.values()))
+
+     return dlis_dict
+
+
+ def _read_single_dlis(dlis_file_path, verbose, std_names):
+     """Read single DLIS file to DataFrame and standardize ALL curves."""
+     try:
+         with dlisio.dlis.load(str(dlis_file_path)) as (f, *rest):
+             if not f.frames:
+                 if verbose:
+                     print(f"✗ No frames: {dlis_file_path.name}")
+                 return None
+
+             # Use first frame (typically contains main log data)
+             frame = f.frames[0]
+             curves_data = frame.curves()
+
+             # Get channel names
+             channels = [ch.name for ch in frame.channels]
+
+             # Create DataFrame
+             df = pd.DataFrame(curves_data, columns=channels)
+
+             if df.empty:
+                 if verbose:
+                     print(f"✗ Empty DataFrame: {dlis_file_path.name}")
+                 return None
+
+             # Set depth index (typically first column or frame.index)
+             if frame.index:
+                 index_name = frame.index
+                 if index_name in df.columns:
+                     df.set_index(index_name, inplace=True)
+             else:
+                 # Use first column as depth
+                 df.set_index(df.columns[0], inplace=True)
+
+             # Ensure index is float
+             df.index = df.index.astype(float)
+             df.index.name = "DEPTH"
+
+             # Create fake las_data object for standardization
+             class FakeLASData:
+                 pass
+
+             fake_las = FakeLASData()
+
+             # Standardize ALL curves (GR, RHOB, NPHI, etc.)
+             _standardize_all_curves(fake_las, df, std_names)
+
+             if verbose:
+                 print(f"✓ {dlis_file_path.name}")
+             return df
+
+     except Exception as e:
+         if verbose:
+             print(f"✗ Error in {dlis_file_path.name}: {type(e).__name__}: {e}")
+         return None
+
+
+ def _get_well_name(dlis_file_path):
+     """Extract well name from DLIS file"""
+     try:
+         with dlisio.dlis.load(str(dlis_file_path)) as (f, *rest):
+             if f.origins:
+                 return str(f.origins[0].well_name).strip()
+     except:
+         pass
+     return dlis_file_path.stem
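*Editorial note (not part of the package diff): a hedged sketch of the new `parseDLIS` API, with placeholder paths. The `preferred_names` mapping is merged over the defaults via `std_names.update(...)`, so only the listed curve types are renamed differently.*

```python
from LASMnemonicsID import parseDLIS

# Keep deep resistivity under "RES_D" instead of the default "RT";
# unlisted curve types keep the defaults (GR, RHOB, NPHI, DT, ...).
preferred = {"deepres": "RES_D"}

data = parseDLIS("field_data/", preferred_names=preferred)

# Directory input -> {filename: DataFrame}; a single .dlis file -> a bare DataFrame.
if isinstance(data, dict):
    for fname, df in data.items():
        print(fname, df.shape)
else:
    print(data.head())
```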
@@ -0,0 +1,2 @@
+
+ from .DLIS import parseDLIS
@@ -1,16 +1,15 @@
-
  # src/LASMnemonicsID/__init__.py

  """LASMnemonicsID package for well log analysis."""

- # Import submodules as objects
  from . import LAS
- #from . import DLIS
+ from . import DLIS
+ from . import ASCII
  from . import utils

- # Import all functions directly for convenience
- from .LAS import *
- from .DLIS import *
- from .utils import *
+ from .LAS.LAS import *
+ from .DLIS.DLIS import *
+ from .ASCII.ASCII import *
+ from .utils.mnemonics import *

  __version__ = "0.0.1"
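*Editorial note: the effect of the revised `__init__.py` above is that all three parsers (and, assuming `utils.mnemonics` defines no restrictive `__all__`, the mnemonic alias lists) are re-exported at the package root, which is what the top-level imports in the new README rely on. A minimal sketch with a placeholder LAS path:*

```python
# Top-level imports enabled by the new wildcard re-exports in __init__.py.
from LASMnemonicsID import parseLAS, parseDLIS, parseASCII
from LASMnemonicsID import gamma_names  # alias list from utils.mnemonics

print(len(gamma_names))            # number of recognised gamma-ray aliases
df = parseLAS("example_well.las")  # placeholder path
print(df.columns)
```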
@@ -0,0 +1,276 @@
+ Metadata-Version: 2.4
+ Name: lasmnemonicsid
+ Version: 0.0.5
+ Summary: Well log mnemonic identification using lasio and dlisio to load LAS/DLIS/ASCII files into DataFrames
+ Author-email: Nobleza Energy <info@nobleza-energy.com>
+ License: MIT
+ Project-URL: Homepage, https://nobleza-energy.github.io/LASMnemonicsID
+ Project-URL: Repository, https://github.com/Nobleza-Energy/LASMnemonicsID
+ Project-URL: Documentation, https://nobleza-energy.github.io/LASMnemonicsID/
+ Project-URL: Bug Tracker, https://github.com/Nobleza-Energy/LASMnemonicsID/issues
+ Classifier: Development Status :: 3 - Alpha
+ Classifier: Intended Audience :: Science/Research
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Topic :: Scientific/Engineering
+ Requires-Python: >=3.10
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: numpy>=1.21.0
+ Requires-Dist: pandas>=2.0.1
+ Requires-Dist: lasio>=0.30
+ Requires-Dist: dlisio>=1.0.0
+ Provides-Extra: docs
+ Requires-Dist: mkdocs>=1.5.0; extra == "docs"
+ Requires-Dist: mkdocs-material>=9.0.0; extra == "docs"
+ Requires-Dist: mkdocstrings[python]>=0.24.0; extra == "docs"
+ Provides-Extra: dev
+ Requires-Dist: pytest>=7.0.0; extra == "dev"
+ Requires-Dist: black>=23.0.0; extra == "dev"
+ Requires-Dist: isort>=5.12.0; extra == "dev"
+ Requires-Dist: flake8>=6.0.0; extra == "dev"
+ Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
+ Dynamic: license-file
+
+ # LASMnemonicsID
+
+ <p align="center">
+ <img src="https://github.com/Nobleza-Energy/LASMnemonicsID/blob/e44bfb606fef5cfc9c3df6e41c3d1bd0d7bb08ae/logo.png?raw=true" alt="LASMnemonicsID Logo" width="200"/>
+ </p>
+
+ <p align="center">
+ <b>Well log mnemonic identification and standardization for LAS, DLIS, and ASCII formats</b>
+ </p>
+
+ <p align="center">
+ <a href="https://pypi.org/project/lasmnemonicsid/"><img src="https://img.shields.io/pypi/v/lasmnemonicsid.svg" alt="PyPI"></a>
+ <a href="https://pypi.org/project/lasmnemonicsid/"><img src="https://img.shields.io/pypi/pyversions/lasmnemonicsid.svg" alt="Python Versions"></a>
+ <a href="https://github.com/Nobleza-Energy/LASMnemonicsID/blob/main/LICENSE"><img src="https://img.shields.io/github/license/Nobleza-Energy/LASMnemonicsID.svg" alt="License"></a>
+ </p>
+
+ ---
+
+ ## Features
+
+ - **Multi-format support**: LAS, DLIS, ASCII/CSV/TXT/DAT
+ - **Automatic mnemonic standardization**: GR, RHOB, NPHI, DT, SP, CALI, RT, etc.
+ - **Batch processing**: Parse entire directories recursively
+ - **Customizable naming**: Override default standard names
+ - **Case-insensitive extensions**: Works with .las/.LAS, .dlis/.DLIS, .csv/.CSV, etc.
+ - **Pandas integration**: Returns clean DataFrames ready for analysis
+
+ ---
+
+ ## Installation
+
+ ```bash
+ pip install lasmnemonicsid
+ ```
+
+ This installs support for **all formats** (LAS, DLIS, ASCII/CSV/TXT).
+
+ ---
+
+ ## Quick Start
+
+ ### LAS Files
+
+ ```python
+ from LASMnemonicsID import parseLAS
+
+ # Parse single LAS file
+ df = parseLAS("well.las")
+ print(df.head())
+
+ # Parse directory
+ data = parseLAS("/path/to/las/files/")
+ for filename, df in data.items():
+     print(f"{filename}: {df.shape}")
+ ```
+
+ ### DLIS Files
+
+ ```python
+ from LASMnemonicsID import parseDLIS
+
+ # Parse single DLIS file
+ df = parseDLIS("well.dlis")
+ print(df.columns)
+
+ # Parse directory
+ data = parseDLIS("/path/to/dlis/files/")
+ ```
+
+ ### ASCII/CSV/TXT Files
+
+ ```python
+ from LASMnemonicsID import parseASCII
+
+ # Parse CSV
+ df = parseASCII("well_log.csv", depth_col="DEPTH")
+
+ # Parse tab-separated TXT
+ df = parseASCII("well_log.txt", delimiter="\t")
+
+ # Parse directory
+ data = parseASCII("/path/to/csv/files/")
+ ```
+
+ ---
+
+ ## Advanced Usage
+
+ ### Custom Preferred Names
+
+ ```python
+ preferred = {
+     "deepres": "RT",
+     "deepres_preferred_original": "AT90",
+     "gamma": "GR"
+ }
+
+ df = parseLAS("well.las", preferred_names=preferred)
+ ```
+
+ ### Batch Processing
+
+ ```python
+ from pathlib import Path
+
+ dir_path = Path("/data/wells/")
+ data = parseLAS(dir_path, verbose=True, preferred_names=preferred)
+
+ for fname, df in data.items():
+     print(f"{fname}: {df.shape}")
+     print(df.head(3))
+ ```
+
+ ### Mixed Format Directories
+
+ ```python
+ las_data = parseLAS("/data/wells/")
+ dlis_data = parseDLIS("/data/wells/")
+ ascii_data = parseASCII("/data/wells/")
+
+ all_data = {**las_data, **dlis_data, **ascii_data}
+ ```
+
+ ---
+
+ ## Supported Mnemonics
+
+ The package automatically standardizes these curve types:
+
+ | Curve Type | Standard Name | Example Aliases |
+ |------------|---------------|-------------|
+ | Gamma Ray | `GR` | gr, cggr, cgr, gam, gamma, gammaray, grc, grd, hgr, sgr, lgr, pgr |
+ | Spontaneous Potential | `SP` | sp, idsp, spr, spl, spdl, spdhp, spc, sp0, sp1, cgsp, dlsp |
+ | Caliper | `CALI` | caliper, calip, cal, dcal, acal, cala, cald, cale, calh, hcal, xcal, ycal |
+ | Deep Resistivity | `RT` | rt, rtao, rt90, ild, idph, rild, rd, ae90, at90, atrt, lld, lldc, res, resd |
+ | Shallow Resistivity | `RXO` | rxo, rxoz, msfl, mcfl, sflcc, mgl, m1rx, r40o, aht10 |
+ | Density | `RHOB` | rhob, rhoz, den, denb, denc, hrho, hrhob, zden, hden, denf, denn |
+ | Density Correction | `DRHO` | dcor, dcorr, dc, decr, drh, zcor, zcorr, hhdr, denscorr |
+ | Neutron Porosity | `NPHI` | cn, phin, cnc, cns, hnphi, nphi, npor, cncc, nprl, neut, neutpor |
+ | Sonic (Compressional) | `DT` | dt, dtc, dtco, dtcomp, deltat, slow, slowness, tt, ac, acco, delt, dtcomp |
+ | Sonic (Shear) | `DTS` | dts, dtsh, dtsm, dtsc, dtsd, dtsqi, dtshear, deltas, tts, stt, dtshear |
+ | Photoelectric Factor | `PEF` | pe, pef, pefz, pdpe, pedf, pedn, hpedn, pe2, pef8, lpe |
+
+
+ ---
+
+ ## Testing
+
+ ```bash
+ pytest tests/ -v
+ pytest tests/test_las.py -v
+ pytest tests/test_dlis.py -v
+ pytest tests/test_ascii.py -v
+ ```
+
+ ---
+
+ ## API Reference
+
+ ### parseLAS(input_path, verbose=True, preferred_names=None)
+
+ Parse LAS file(s) and standardize mnemonics.
+
+ **Parameters:**
+ - input_path (str/Path): LAS file or directory
+ - verbose (bool): Print parsing info
+ - preferred_names (dict): Custom name mappings
+
+ **Returns:** DataFrame (single file) or dict (multiple files)
+
+ ### parseDLIS(input_path, verbose=True, preferred_names=None)
+
+ Parse DLIS file(s) and standardize mnemonics.
+
+ **Parameters:**
+ - input_path (str/Path): DLIS file or directory
+ - verbose (bool): Print parsing info
+ - preferred_names (dict): Custom name mappings
+
+ **Returns:** DataFrame (single file) or dict (multiple files)
+
+ ### parseASCII(input_path, verbose=True, preferred_names=None, depth_col="DEPTH", delimiter=",")
+
+ Parse ASCII/CSV/TXT file(s) and standardize mnemonics.
+
+ **Parameters:**
+ - input_path (str/Path): ASCII file or directory
+ - verbose (bool): Print parsing info
+ - preferred_names (dict): Custom name mappings
+ - depth_col (str): Name of depth column
+ - delimiter (str): Field separator
+
+ **Returns:** DataFrame (single file) or dict (multiple files)
+
+ ---
+
+ ## How to Cite
+
+ **APA**
+
+ > Nobleza Energy. (2026). LASMnemonicsID: Well log mnemonic identification for LAS, DLIS, and ASCII formats [Software]. GitHub. https://github.com/Nobleza-Energy/LASMnemonicsID
+
+ **BibTeX**
+
+ ```bibtex
+ @software{LASMnemonicsID,
+ author = {Nobleza Energy},
+ title = {LASMnemonicsID: Well log mnemonic identification for LAS, DLIS, and ASCII formats},
+ year = {2026},
+ publisher = {GitHub},
+ url = {https://github.com/Nobleza-Energy/LASMnemonicsID}
+ }
+ ```
+
+ ---
+
+ ## License
+
+ MIT License - see [LICENSE](LICENSE) file.
+
+ ---
+
+ ## Contributing
+
+ Contributions welcome! Submit a Pull Request.
+
+ ---
+
+ ## Support
+
+ - **Issues:** [GitHub Issues](https://github.com/Nobleza-Energy/LASMnemonicsID/issues)
+ - **Discussions:** [GitHub Discussions](https://github.com/Nobleza-Energy/LASMnemonicsID/discussions)
+
+ ---
+
+ <p align="center">
+ Made with ❤️ by <a href="https://nobleza-energy.com">Nobleza Energy</a>
+ </p>
@@ -0,0 +1,14 @@
+ LASMnemonicsID/__init__.py,sha256=gsIy4CT1aZrfdm7yngD5CyGx0c0VrUS-2kL9aPMWqNU,294
+ LASMnemonicsID/ASCII/ASCII.py,sha256=55_KyVai0W6WABAav-tqx5XvZOOrnxYspjoAbcbF0ws,4260
+ LASMnemonicsID/ASCII/__init__.py,sha256=hvl0pDTh7kEEGMi2D36hw17ftnWY6SOM3IkxBNcaaLQ,22
+ LASMnemonicsID/DLIS/DLIS.py,sha256=byYJoKAiahlKUORt5c_iZZ4aQmjdiDIXLp2Qr7Hbiyk,4663
+ LASMnemonicsID/DLIS/__init__.py,sha256=UVM8tn6cpFT8H-wl7mqMDiE8GObkIWbYR-FGq4Oqoj8,29
+ LASMnemonicsID/LAS/LAS.py,sha256=gxeLlARZJV3ECxIQaoqO8YeOUfnlMUBKXqRFY-JivCs,6048
+ LASMnemonicsID/LAS/__init__.py,sha256=dTM87nn0zNUaKp29HocOODJT_-VM1CZED9Ar_FSOr-4,232
+ LASMnemonicsID/utils/__init__.py,sha256=ree81DUTsdjXfO3h-q7YyNrV6mTIKSGxgWPWGGTSVU0,1388
+ LASMnemonicsID/utils/mnemonics.py,sha256=VU25CXmQvUo0sS3Y6kG_G7KwRE2CiuoJeC7LT6FmNzg,7283
+ lasmnemonicsid-0.0.5.dist-info/licenses/LICENSE,sha256=6r9JOUiNw1exfcc0jlOi50fDStidfqyQ2PAYQh4lzEQ,1071
+ lasmnemonicsid-0.0.5.dist-info/METADATA,sha256=F9TC9atDWz404-_5_4V6L73WTkYJrv1iIbhr6osc2xA,7852
+ lasmnemonicsid-0.0.5.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ lasmnemonicsid-0.0.5.dist-info/top_level.txt,sha256=bdt6EHMrwbzFA9jA_xbTqRrOV6T4zDs3QojjEz8HSBk,15
+ lasmnemonicsid-0.0.5.dist-info/RECORD,,
@@ -1,110 +0,0 @@
- Metadata-Version: 2.4
- Name: lasmnemonicsid
- Version: 0.0.3rc0
- Summary: Well log mnemonic identification using lasio and dlisio to load LAS/DLIS files into DataFrames
- Author-email: Nobleza Energy <info@nobleza-energy.com>
- License: MIT
- Project-URL: Homepage, https://nobleza-energy.github.io/LASMnemonicsID
- Project-URL: Repository, https://github.com/Nobleza-Energy/LASMnemonicsID
- Project-URL: Documentation, https://nobleza-energy.github.io/LASMnemonicsID/
- Project-URL: Bug Tracker, https://github.com/Nobleza-Energy/LASMnemonicsID/issues
- Classifier: Development Status :: 3 - Alpha
- Classifier: Intended Audience :: Science/Research
- Classifier: License :: OSI Approved :: MIT License
- Classifier: Operating System :: OS Independent
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: Programming Language :: Python :: 3.12
- Classifier: Topic :: Scientific/Engineering
- Requires-Python: >=3.10
- Description-Content-Type: text/markdown
- License-File: LICENSE
- Requires-Dist: numpy>=1.21.0
- Requires-Dist: pandas>=2.0.1
- Requires-Dist: lasio>=0.30
- Requires-Dist: dlisio>=1.0.0
- Provides-Extra: docs
- Requires-Dist: mkdocs>=1.5.0; extra == "docs"
- Requires-Dist: mkdocs-material>=9.0.0; extra == "docs"
- Requires-Dist: mkdocstrings[python]>=0.24.0; extra == "docs"
- Provides-Extra: dev
- Requires-Dist: pytest>=7.0.0; extra == "dev"
- Requires-Dist: black>=23.0.0; extra == "dev"
- Requires-Dist: isort>=5.12.0; extra == "dev"
- Requires-Dist: flake8>=6.0.0; extra == "dev"
- Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
- Dynamic: license-file
-
- <p align="center">
- <img src="https://raw.githubusercontent.com/Nobleza-Energy/LASMnemonicsID/main/logo.png" alt="LASMnemonicsID Logo" width="200"/>
- </p>
-
- <h1 align="center">LASMnemonicsID</h1>
-
- <p align="center">
- <b>Well log mnemonic identification using lasio and dlisio</b>
- </p>
-
- <p align="center">
- <a href="https://pypi.org/project/lasmnemonicsid/"><img src="https://img.shields.io/pypi/v/lasmnemonicsid.svg" alt="PyPI"></a>
- <a href="https://pypi.org/project/lasmnemonicsid/"><img src="https://img.shields.io/pypi/pyversions/lasmnemonicsid.svg" alt="Python Versions"></a>
- <a href="https://github.com/Nobleza-Energy/LASMnemonicsID/blob/main/LICENSE"><img src="https://img.shields.io/github/license/Nobleza-Energy/LASMnemonicsID.svg" alt="License"></a>
- </p>
-
- ---
-
- ## 📦 Installation
-
- ```bash
- pip install lasmnemonicsid
- ```
-
- ## 🚀 QuickStart
-
- ```python
- from LASMnemonicsID.LAS import parseLAS
-
- # Load LAS file
- df = parseLAS("your_well.las")
- print(df.head())
- ```
-
- ## 🧪 Test with your Data: Multiple files will load into a dictionary
-
- ```python
- from LASMnemonicsID.LAS import parseLAS
-
- # Load all .las within the Directory → {filename: df}
- data = parseLAS("/path/to/your/data/")
- print("Files:", list(data.keys()))
-
- # Dataframes
- df = parseLAS('/path/to/yourfile.las')
- print(df.head())
- ```
-
- ## 📈 Star History
-
- [![Star History Chart](https://api.star-history.com/svg?repos=Nobleza-Energy/LASMnemonicsID&type=Date)](https://star-history.com/#Nobleza-Energy/LASMnemonicsID&Date)
-
-
- ## 📄 How to Cite
-
- If you use `LASMnemonicsID` in your research or project, please cite it as follows:
-
- **APA**
-
- > Nobleza Energy. (2025). LASMnemonicsID: Well log mnemonic identification using lasio and dlisio [Software]. GitHub. https://github.com/Nobleza-Energy/LASMnemonicsID
-
- **BibTeX**
-
- ```bibtex
- @software{LASMnemonicsID,
- author = {Nobleza Energy},
- title = {LASMnemonicsID: Well log mnemonic identification using lasio and dlisio},
- year = {2025},
- publisher = {GitHub},
- journal = {GitHub repository},
- url = {https://github.com/Nobleza-Energy/LASMnemonicsID}
- }
@@ -1,11 +0,0 @@
- LASMnemonicsID/__init__.py,sha256=IjJHoiHWr1CfP3K01xW61UhnJYP_9LOOaCqJnhLFlPc,309
- LASMnemonicsID/DLIS/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- LASMnemonicsID/LAS/LAS.py,sha256=gxeLlARZJV3ECxIQaoqO8YeOUfnlMUBKXqRFY-JivCs,6048
- LASMnemonicsID/LAS/__init__.py,sha256=dTM87nn0zNUaKp29HocOODJT_-VM1CZED9Ar_FSOr-4,232
- LASMnemonicsID/utils/__init__.py,sha256=ree81DUTsdjXfO3h-q7YyNrV6mTIKSGxgWPWGGTSVU0,1388
- LASMnemonicsID/utils/mnemonics.py,sha256=VU25CXmQvUo0sS3Y6kG_G7KwRE2CiuoJeC7LT6FmNzg,7283
- lasmnemonicsid-0.0.3rc0.dist-info/licenses/LICENSE,sha256=6r9JOUiNw1exfcc0jlOi50fDStidfqyQ2PAYQh4lzEQ,1071
- lasmnemonicsid-0.0.3rc0.dist-info/METADATA,sha256=AXUCpIS5uLGwI6yGlBIctpyI0EN-KZGo5u6cimjOi_E,3706
- lasmnemonicsid-0.0.3rc0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
- lasmnemonicsid-0.0.3rc0.dist-info/top_level.txt,sha256=bdt6EHMrwbzFA9jA_xbTqRrOV6T4zDs3QojjEz8HSBk,15
- lasmnemonicsid-0.0.3rc0.dist-info/RECORD,,
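*Editorial note: a quick way to sanity-check an installed 0.0.5 wheel against this diff (sketch only, assuming the package is installed). Note from the `__init__.py` hunk above that `__version__` is still hard-coded to `"0.0.1"`, so the attribute does not track the distribution version in the metadata.*

```python
# Post-upgrade sanity check (not part of the diff).
import LASMnemonicsID

# Wheel metadata says 0.0.5, but __init__.py still reports "0.0.1".
print(LASMnemonicsID.__version__)

# New top-level entry points introduced by this release:
print(hasattr(LASMnemonicsID, "parseDLIS"), hasattr(LASMnemonicsID, "parseASCII"))
```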