vtool-ibeis 2.2.0__py3-none-any.whl → 2.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vtool_ibeis/__init__.py +1 -4
- vtool_ibeis/_rhomb_dist.py +0 -2
- vtool_ibeis/blend.py +1 -5
- vtool_ibeis/chip.py +2 -5
- vtool_ibeis/clustering2.py +5 -8
- vtool_ibeis/confusion.py +6 -8
- vtool_ibeis/coverage_grid.py +2 -5
- vtool_ibeis/coverage_kpts.py +18 -19
- vtool_ibeis/demodata.py +5 -7
- vtool_ibeis/distance.py +9 -14
- vtool_ibeis/ellipse.py +2 -5
- vtool_ibeis/exif.py +5 -10
- vtool_ibeis/features.py +3 -6
- vtool_ibeis/fontdemo.py +0 -3
- vtool_ibeis/geometry.py +8 -18
- vtool_ibeis/histogram.py +7 -10
- vtool_ibeis/image.py +57 -54
- vtool_ibeis/image_filters.py +14 -13
- vtool_ibeis/image_shared.py +1 -4
- vtool_ibeis/inspect_matches.py +2 -4
- vtool_ibeis/keypoint.py +12 -23
- vtool_ibeis/linalg.py +0 -1
- vtool_ibeis/matching.py +19 -17
- vtool_ibeis/nearest_neighbors.py +13 -5
- vtool_ibeis/numpy_utils.py +6 -10
- vtool_ibeis/other.py +9 -12
- vtool_ibeis/patch.py +13 -37
- vtool_ibeis/quality_classifier.py +2 -3
- vtool_ibeis/score_normalization.py +3 -6
- vtool_ibeis/segmentation.py +7 -9
- vtool_ibeis/spatial_verification.py +12 -11
- vtool_ibeis/symbolic.py +0 -2
- vtool_ibeis/trig.py +0 -1
- vtool_ibeis/util_math.py +3 -6
- {vtool_ibeis-2.2.0.dist-info → vtool_ibeis-2.3.0.dist-info}/LICENSE +4 -4
- vtool_ibeis-2.3.0.dist-info/METADATA +561 -0
- vtool_ibeis-2.3.0.dist-info/RECORD +42 -0
- {vtool_ibeis-2.2.0.dist-info → vtool_ibeis-2.3.0.dist-info}/WHEEL +1 -1
- vtool_ibeis/_old_matching.py +0 -262
- vtool_ibeis-2.2.0.dist-info/METADATA +0 -281
- vtool_ibeis-2.2.0.dist-info/RECORD +0 -43
- {vtool_ibeis-2.2.0.dist-info → vtool_ibeis-2.3.0.dist-info}/top_level.txt +0 -0
vtool_ibeis/nearest_neighbors.py
CHANGED
@@ -3,7 +3,6 @@ Wrapper around flann (with caching)
 
 python -c "import vtool_ibeis, doctest; print(doctest.testmod(vtool_ibeis.nearest_neighbors))"
 """
-from __future__ import absolute_import, division, print_function
 from os.path import exists, normpath, join
 import utool as ut
 import ubelt as ub
@@ -39,8 +38,8 @@ class AnnoyWrapper(object):
     def nn_index(self, qvecs, num_neighbs, checks=None):
         if checks is None:
             checks = self.params['checks']
-        idxs = np.empty((len(qvecs), num_neighbs), dtype=
-        dists = np.empty((len(qvecs), num_neighbs), dtype=
+        idxs = np.empty((len(qvecs), num_neighbs), dtype=int)
+        dists = np.empty((len(qvecs), num_neighbs), dtype=float)
         for i, qvec in enumerate(qvecs):
             idxs[i], dists[i] = self.ann.get_nns_by_vector(
                 qvec, n=num_neighbs, search_k=checks, include_distances=True)
@@ -110,8 +109,10 @@ def test_cv2_flann():
     from vtool_ibeis import demodata
     import plottool_ibeis as pt
     import vtool_ibeis as vt
-    img1 = vt.imread(ut.grab_test_imgpath('easy1.png'))
-    img2 = vt.imread(ut.grab_test_imgpath('easy2.png'))
+    # img1 = vt.imread(ut.grab_test_imgpath('easy1.png'))
+    # img2 = vt.imread(ut.grab_test_imgpath('easy2.png'))
+    img1 = vt.imread(ut.grab_test_imgpath('tsukuba_l'))
+    img2 = vt.imread(ut.grab_test_imgpath('tsukuba_r'))
 
     stereo = cv2.StereoBM_create(numDisparities=16, blockSize=15)
     disparity = stereo.compute(img1, img2)
@@ -176,6 +177,7 @@ def ann_flann_once(dpts, qpts, num_neighbors, flann_params={}):
 
     Example0:
         >>> # ENABLE_DOCTEST
+        >>> # xdoctest: +REQUIRES(module:pyflann_ibeis)
        >>> from vtool_ibeis.nearest_neighbors import * # NOQA
        >>> np.random.seed(1)
        >>> dpts = np.random.randint(0, 255, (5, 128)).astype(np.uint8)
@@ -196,6 +198,7 @@ def ann_flann_once(dpts, qpts, num_neighbors, flann_params={}):
        >>> # ENABLE_DOCTEST
        >>> # Test upper bounds on sift descriptors
        >>> # SeeAlso distance.understanding_pseudomax_props
+        >>> # xdoctest: +REQUIRES(module:pyflann_ibeis)
        >>> from vtool_ibeis.nearest_neighbors import * # NOQA
        >>> import vtool_ibeis as vt
        >>> import numpy as np
@@ -301,6 +304,7 @@ def get_flann_cfgstr(dpts, flann_params, cfgstr='', use_params_hash=True,
 
     Example:
        >>> # ENABLE_DOCTEST
+        >>> # xdoctest: +REQUIRES(module:pyflann_ibeis)
        >>> from vtool_ibeis.nearest_neighbors import * # NOQA
        >>> rng = np.random.RandomState(1)
        >>> dpts = rng.randint(0, 255, (10, 128)).astype(np.uint8)
@@ -404,6 +408,7 @@ def flann_augment(dpts, new_dpts, cache_dir, cfgstr, new_cfgstr, flann_params,
     """
     Example:
        >>> # DISABLE_DOCTEST
+        >>> # xdoctest: +REQUIRES(module:pyflann_ibeis)
        >>> from vtool_ibeis.nearest_neighbors import * # NOQA
        >>> import vtool_ibeis.demodata as demodata # NOQA
        >>> dpts = demodata.get_dummy_dpts(ut.get_nth_prime(10))
@@ -452,6 +457,7 @@ def get_flann_params(algorithm='kdtree', **kwargs):
 
     Example:
        >>> # ENABLE_DOCTEST
+        >>> # xdoctest: +REQUIRES(module:pyflann_ibeis)
        >>> from vtool_ibeis.nearest_neighbors import * # NOQA
        >>> algorithm = ut.get_argval('--algo', default='kdtree')
        >>> flann_params = get_flann_params(algorithm)
@@ -660,6 +666,7 @@ def flann_index_time_experiment():
     Example:
        >>> # SLOW_DOCTEST
        >>> # xdoctest: +SKIP
+        >>> # xdoctest: +REQUIRES(module:pyflann_ibeis)
        >>> from vtool_ibeis.nearest_neighbors import * # NOQA
        >>> result = flann_index_time_experiment()
        >>> print(result)
@@ -755,6 +762,7 @@ def invertible_stack(vecs_list, label_list):
 
     Example:
        >>> # DISABLE_DOCTEST
+        >>> # xdoctest: +REQUIRES(module:pyflann_ibeis)
        >>> from vtool_ibeis.nearest_neighbors import * # NOQA
        >>> DESC_TYPE = np.uint8
        >>> label_list = [1, 2, 3, 4, 5]
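The recurring edit in nearest_neighbors.py is the new `# xdoctest: +REQUIRES(module:pyflann_ibeis)` directive, which makes each doctest conditional on the optional pyflann_ibeis backend instead of failing when it is missing. A minimal sketch of how such a guard behaves under xdoctest (the function below is hypothetical; only the directive itself comes from this diff):

    def guarded_example():
        """
        Example:
            >>> # xdoctest: +REQUIRES(module:pyflann_ibeis)
            >>> # Lines after the directive run only when pyflann_ibeis is
            >>> # importable; otherwise xdoctest marks the example as skipped.
            >>> import pyflann_ibeis  # NOQA
            >>> print('backend available')
            backend available
        """

On a machine without the backend, running the doctest reports it as skipped rather than as a failure, which is the point of adding the guards throughout this module.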
vtool_ibeis/numpy_utils.py
CHANGED
|
@@ -1,13 +1,9 @@
-# -*- coding: utf-8 -*-
 """
 These functions might be PR quality for numpy.
 """
-from __future__ import absolute_import, division, print_function, unicode_literals
 import numpy as np
-import utool as ut
-import ubelt as ub
-from six import next
-from six.moves import zip, range
+import utool as ut # NOQA
+import ubelt as ub # NOQA
 
 
 def atleast_nd(arr, n, tofront=False):
@@ -121,7 +117,7 @@ def fromiter_nd(iter_, shape, dtype):
     Example:
        >>> # ENABLE_DOCTEST
        >>> from vtool_ibeis.numpy_utils import * # NOQA
-        >>> dtype =
+        >>> dtype = float
        >>> total = 11
        >>> rng = np.random.RandomState(0)
        >>> iter_ = (rng.rand(5, 7, 3) for _ in range(total))
@@ -132,7 +128,7 @@ def fromiter_nd(iter_, shape, dtype):
     Example:
        >>> # ENABLE_DOCTEST
        >>> from vtool_ibeis.numpy_utils import * # NOQA
-        >>> dtype =
+        >>> dtype = int
        >>> qfxs = np.array([1, 2, 3])
        >>> dfxs = np.array([4, 5, 6])
        >>> iter_ = (np.array(x) for x in ut.product(qfxs, dfxs))
@@ -148,7 +144,7 @@ def fromiter_nd(iter_, shape, dtype):
        >>> sigma = 500
        >>> n_data = 1000
        >>> rng = np.random.RandomState(42)
-        >>> n_feat_list = np.clip(rng.randn(n_data) * sigma + mu, 0, np.inf).astype(
+        >>> n_feat_list = np.clip(rng.randn(n_data) * sigma + mu, 0, np.inf).astype(int)
        >>> # Make a large list of vectors of various sizes
        >>> print('Making random vectors')
        >>> vecs_list = [(rng.rand(num, feat_dim) * 255).astype(dtype) for num in n_feat_list]
@@ -229,7 +225,7 @@ def index_to_boolmask(index_list, maxval=None, isflat=True):
     #assert index_list.min() >= 0
     if maxval is None:
         maxval = index_list.max()
-    mask = np.zeros(maxval, dtype=
+    mask = np.zeros(maxval, dtype=bool)
     if not isflat:
         # assumes non-flat
         mask.__setitem__(tuple(index_list.T), True)
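The removed lines in this file are truncated at `dtype=` in this view, but the replacements (the `bool`, `int`, and `float` builtins) match the standard migration away from the deprecated NumPy scalar aliases (`np.bool`, `np.int`, `np.float`), which NumPy 1.20 deprecated and NumPy 1.24 removed; treating the old values as those aliases is an inference from the truncated view, not something the diff shows. A small sketch of the equivalent spellings:

    import numpy as np

    # The builtins are what the deprecated aliases always resolved to,
    # so the resulting arrays are unchanged on every NumPy version.
    mask = np.zeros(5, dtype=bool)    # previously spelled with a deprecated alias
    idxs = np.empty(5, dtype=int)
    dists = np.empty(5, dtype=float)
    assert mask.dtype.kind == 'b' and idxs.dtype.kind == 'i' and dists.dtype.kind == 'f'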
vtool_ibeis/other.py
CHANGED
|
@@ -1,11 +1,7 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import, division, print_function, unicode_literals
 import numpy as np
 import utool as ut
 import ubelt as ub
 import functools # NOQA
-from six import next
-from six.moves import zip, range
 
 
 def safe_vstack(tup, default_shape=(0,), default_dtype=float):
@@ -456,7 +452,7 @@ def compute_unique_data_ids_(hashable_rows, iddict_=None):
     for row in hashable_rows:
         if row not in iddict_:
             iddict_[row] = len(iddict_)
-    dataid_list =
+    dataid_list = list(ub.take(iddict_, hashable_rows))
     return dataid_list
 
 
@@ -1143,8 +1139,8 @@ def flag_intersection(arr1, arr2):
     """
     import vtool_ibeis as vt
     if arr1.size == 0 or arr2.size == 0:
-        flags = np.full(arr1.shape[0], False, dtype=
-        #return np.empty((0,), dtype=
+        flags = np.full(arr1.shape[0], False, dtype=bool)
+        #return np.empty((0,), dtype=bool)
     else:
         # flags = np.logical_or.reduce([arr1 == row for row in arr2]).T[0]
         flags = vt.iter_reduce_ufunc(np.logical_or, (arr1 == row_ for row_ in arr2)).ravel()
@@ -1357,6 +1353,7 @@ def get_uncovered_mask(covered_array, covering_array):
        >>> flags = get_uncovered_mask(covered_array, covering_array)
        >>> result = ub.repr2(flags, with_dtype=True)
        >>> print(result)
+
        np.array([[ True, False, True],
        [False, False, True],
        [ True, True, True]], dtype=bool)
@@ -1372,13 +1369,13 @@ def get_uncovered_mask(covered_array, covering_array):
     """
     import vtool_ibeis as vt
     if len(covering_array) == 0:
-        return np.ones(np.shape(covered_array), dtype=
+        return np.ones(np.shape(covered_array), dtype=bool)
     else:
         flags_iter = (np.not_equal(covered_array, item) for item in covering_array)
         mask_array = vt.iter_reduce_ufunc(np.logical_and, flags_iter)
         return mask_array
     #if len(covering_array) == 0:
-    #    return np.ones(np.shape(covered_array), dtype=
+    #    return np.ones(np.shape(covered_array), dtype=bool)
     #else:
     #    flags_list = (np.not_equal(covered_array, item) for item in covering_array)
     #    mask_array = and_lists(*flags_list)
@@ -1387,7 +1384,7 @@ def get_uncovered_mask(covered_array, covering_array):
 
 #def get_uncovered_mask2(covered_array, covering_array):
 #    if len(covering_array) == 0:
-#        return np.ones(np.shape(covered_array), dtype=
+#        return np.ones(np.shape(covered_array), dtype=bool)
 #    else:
 #        flags_iter = (np.not_equal(covered_array, item) for item in covering_array)
 #        mask_array = vt.iter_reduce_ufunc(np.logical_and, flags_iter)
@@ -2419,7 +2416,7 @@ def fromiter_nd(iter_, shape, dtype):
     Example:
        >>> # ENABLE_DOCTEST
        >>> from vtool_ibeis.other import * # NOQA
-        >>> dtype =
+        >>> dtype = int
        >>> qfxs = np.array([1, 2, 3])
        >>> dfxs = np.array([4, 5, 6])
        >>> iter_ = (np.array(x) for x in ut.product(qfxs, dfxs))
@@ -2496,7 +2493,7 @@ def take_col_per_row(arr, colx_list):
     num_cols = 4
 
     arr = np.arange(10 * 4).reshape(10, 4)
-    colx_list = (np.random.rand(10) * 4).astype(
+    colx_list = (np.random.rand(10) * 4).astype(int)
 
     %timeit np.array([row[cx] for (row, cx) in zip(arr, colx_list)])
     %timeit arr.ravel().take(np.ravel_multi_index((np.arange(len(colx_list)), colx_list), arr.shape))
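One functional change in this file: compute_unique_data_ids_ now builds its result with ubelt's `ub.take`, which yields `iddict_[row]` for each requested row in order. A standalone sketch of that lookup (the sample rows are made up for illustration):

    import ubelt as ub

    hashable_rows = [(1, 2), (3, 4), (1, 2)]
    iddict_ = {}
    for row in hashable_rows:
        if row not in iddict_:
            iddict_[row] = len(iddict_)

    # ub.take on a dict yields the value for each requested key, in order;
    # list() realizes the generator.
    dataid_list = list(ub.take(iddict_, hashable_rows))
    print(dataid_list)  # [0, 1, 0]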
vtool_ibeis/patch.py
CHANGED
|
@@ -1,8 +1,3 @@
-# -*- coding: utf-8 -*-
-# LICENCE
-from __future__ import absolute_import, division, print_function, unicode_literals
-import six # NOQA
-from six.moves import zip
 import numpy as np
 from vtool_ibeis import histogram as htool
 from vtool_ibeis import keypoint as ktool
@@ -575,7 +570,7 @@ def get_warped_patches(img, kpts, flags=cv2.INTER_LANCZOS4,
        >>> from vtool_ibeis.patch import * # NOQA
        >>> import vtool_ibeis as vt
        >>> # build test data
-        >>> img_fpath = ut.grab_test_imgpath('carl
+        >>> img_fpath = ut.grab_test_imgpath('carl')
        >>> img = vt.imread(img_fpath)
        >>> use_cpp = ut.get_argflag('--use_cpp')
        >>> kpts, desc = vt.extract_features(img_fpath)
@@ -732,7 +727,7 @@ def intern_warp_single_patch(img, x, y, ori, V,
     X = ltool.translation_mat3x3(half_patch_size, half_patch_size)  # Translate back to patch-image coordinates
     M = X.dot(S).dot(R).dot(V).dot(T)
     # Prepare to warp
-    dsize = np.ceil([patch_size, patch_size]).astype(
+    dsize = np.ceil([patch_size, patch_size]).astype(int)
     # Warp
     #warped_patch = gtool.warpAffine(img, M, dsize)
     warped_patch = cv2.warpAffine(img, M[0:2], tuple(dsize), **cv2_warp_kwargs)
@@ -781,7 +776,7 @@ def patch_gaussian_weighted_average_intensities(probchip, kpts_):
     import vtool_ibeis as vt
     patch_size = 41
     M_iter = vt.generate_to_patch_transforms(kpts_, patch_size)
-    dsize = np.ceil([patch_size, patch_size]).astype(
+    dsize = np.ceil([patch_size, patch_size]).astype(int)
     # Preallocate patch
     patch = np.empty(dsize[::-1], dtype=np.uint8)
     weighted_patch = np.empty(dsize[::-1], dtype=np.float64)
@@ -1070,19 +1065,14 @@ def draw_kp_ori_steps():
        >>> draw_kp_ori_steps()
        >>> pt.show_if_requested()
     """
-    #from vtool_ibeis.patch import * # NOQA
-    #import vtool_ibeis as vt
     # build test data
     import utool as ut
     import plottool_ibeis as pt
-    from six.moves import input
-    import vtool_ibeis as vt
 
     if True:
         from ibeis.scripts.thesis import TMP_RC
         import matplotlib as mpl
         mpl.rcParams.update(TMP_RC)
-        #import vtool_ibeis as vt
     np.random.seed(0)
     USE_COMMANLINE = True
     if USE_COMMANLINE:
@@ -1091,24 +1081,16 @@ def draw_kp_ori_steps():
         kp = kpts[fx]
     else:
         fx = 0
-
-
-
-
-
-
-
-
-
-
-        imgBGR = get_test_patch('star2', jitter=True)
-        #imgBGR = get_test_patch('cross', jitter=False)
-        #imgBGR = cv2.resize(imgBGR, (41, 41), interpolation=cv2.INTER_LANCZOS4)
-        imgBGR = cv2.resize(imgBGR, (41, 41), interpolation=cv2.INTER_CUBIC)
-        theta = 0  # 3.4 # TAU / 16
-        #kpts = make_test_image_keypoints(imgBGR, scale=.9, theta=theta)
-        kpts = make_test_image_keypoints(imgBGR, scale=.3, theta=theta, shift=(.3, .1))
-        kp = kpts[0]
+        #imgBGR = get_test_patch('stripe', jitter=True)
+        #imgBGR = get_test_patch('star', jitter=True)
+        imgBGR = get_test_patch('star2', jitter=True)
+        #imgBGR = get_test_patch('cross', jitter=False)
+        #imgBGR = cv2.resize(imgBGR, (41, 41), interpolation=cv2.INTER_LANCZOS4)
+        imgBGR = cv2.resize(imgBGR, (41, 41), interpolation=cv2.INTER_CUBIC)
+        theta = 0  # 3.4 # TAU / 16
+        #kpts = make_test_image_keypoints(imgBGR, scale=.9, theta=theta)
+        kpts = make_test_image_keypoints(imgBGR, scale=.3, theta=theta, shift=(.3, .1))
+        kp = kpts[0]
     bins = 36
     maxima_thresh = .8
     converge_lists = []
@@ -1127,12 +1109,6 @@ def draw_kp_ori_steps():
     internal_tup = ut.exec_func_src(find_patch_dominant_orientations, globals_, locals_, key_list=keys, update=True)
     submax_ori_offsets = globals_['submax_ori_offsets']
     new_oris = (old_ori + (submax_ori_offsets - ktool.GRAVITY_THETA)) % TAU
-    # sourcecode = ut.get_func_sourcecode(find_patch_dominant_orientations, stripdef=True, stripret=True)
-    # six.exec_(sourcecode, globals_, locals_)
-    # submax_ori_offsets = locals_['submax_ori_offsets']
-    # new_oris = (old_ori + (submax_ori_offsets - ktool.GRAVITY_THETA)) % TAU
-    # keys = 'patch, gradx, grady, gmag, gori, hist, centers, gori_weights'.split(', ')
-    # internal_tup = ut.dict_take(locals_, keys)
     return new_oris, internal_tup
     # </HACKISH>
 
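The `.astype(int)` casts on `dsize` in this file matter because OpenCV expects an integer `(width, height)` pair, while `np.ceil` returns floats. A minimal sketch of the warp-call pattern used above (the blank image and the identity transform here are made up for illustration; only the dsize/warpAffine idiom comes from the diff):

    import cv2
    import numpy as np

    img = np.zeros((100, 100), dtype=np.uint8)
    patch_size = 41
    M = np.eye(3)  # stand-in for the composed X.S.R.V.T transform

    # np.ceil yields floats; cast to int before handing the size to OpenCV.
    dsize = np.ceil([patch_size, patch_size]).astype(int)
    warped_patch = cv2.warpAffine(img, M[0:2], tuple(dsize))
    print(warped_patch.shape)  # (41, 41)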
vtool_ibeis/quality_classifier.py
CHANGED
@@ -4,7 +4,6 @@ References:
     % Single-image noise level estimation for blind denoising.
     % http://www.ok.ctrl.titech.ac.jp/res/NLE/TIP2013-noise-level-estimation06607209.pdf
 """
-from __future__ import absolute_import, division, print_function, unicode_literals
 import utool as ut
 import ubelt as ub
 import numpy as np
@@ -20,7 +19,7 @@ def compute_average_contrast(img):
        >>> # ENABLE_DOCTEST
        >>> from vtool_ibeis.quality_classifier import * # NOQA
        >>> import vtool_ibeis as vt
-        >>> img_fpath = ut.grab_test_imgpath('carl
+        >>> img_fpath = ut.grab_test_imgpath('carl')
        >>> img = vt.imread(img_fpath, grayscale=True)
        >>> average_contrast, gradmag_sqrd = compute_average_contrast(img)
        >>> # xdoctest: +REQUIRES(module:plottool_ibeis)
@@ -105,7 +104,7 @@ def fourier_devtest(img):
        >>> # DISABLE_DOCTEST
        >>> from vtool_ibeis.quality_classifier import * # NOQA
        >>> import vtool_ibeis as vt
-        >>> img_fpath = ut.grab_test_imgpath('carl
+        >>> img_fpath = ut.grab_test_imgpath('carl')
        >>> img = vt.imread(img_fpath, grayscale=True)
        >>> magnitude_spectrum = fourier_devtest(img)
    """
vtool_ibeis/score_normalization.py
CHANGED
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import, division, print_function, unicode_literals
 import numpy as np
 import utool as ut
 import ubelt as ub
@@ -813,7 +811,7 @@ def partition_scores(X, y, attrs=None):
        >>> from vtool_ibeis.score_normalization import * # NOQA
        >>> X = np.array([5, 6, 6, 7, 1, 2, 2])
        >>> attrs = {'qaid': np.array([21, 24, 25, 26, 11, 14, 15])}
-        >>> y = np.array([1, 1, 1, 1, 0, 0, 0], dtype=
+        >>> y = np.array([1, 1, 1, 1, 0, 0, 0], dtype=bool)
        >>> tup = partition_scores(X, y, attrs)
        >>> resdict = ut.odict(zip(
        >>> ['tp_scores', 'tn_scores', 'part_attrs'], tup))
@@ -827,7 +825,6 @@ def partition_scores(X, y, attrs=None):
        False: {'qaid': np.array([11, 14, 15])},
        True: {'qaid': np.array([21, 24, 25, 26])},
        },
-
    """
    import vtool_ibeis as vt
    import operator
@@ -879,7 +876,7 @@ def flatten_scores(tp_scores, tn_scores, part_attrs=None):
        >>> tup = flatten_scores(
        ...     tp_scores, tn_scores, part_attrs)
        >>> (X, y, attrs) = tup
-        >>> y = y.astype(
+        >>> y = y.astype(int)
        >>> resdict = ut.odict(zip(['X', 'y', 'attrs'], [X, y, attrs]))
        >>> result = ub.repr2(resdict, nobraces=True, with_dtype=False,
        >>> explicit=1, nl=1)
@@ -890,7 +887,7 @@ def flatten_scores(tp_scores, tn_scores, part_attrs=None):
        attrs={'qaid': np.array([21, 24, 25, 26, 11, 14, 15])},
    """
    scores = np.hstack([tp_scores, tn_scores])
-    labels = np.zeros(scores.size, dtype=
+    labels = np.zeros(scores.size, dtype=bool)
    labels[0:len(tp_scores)] = True
    if part_attrs is None:
        return scores, labels
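The flatten_scores hunk shows the whole labeling scheme: true-positive and true-negative scores are stacked, and a boolean array marks the leading true-positive block. A self-contained sketch of that logic, using the same score values as the doctest above:

    import numpy as np

    tp_scores = np.array([5, 6, 6, 7])
    tn_scores = np.array([1, 2, 2])

    scores = np.hstack([tp_scores, tn_scores])
    labels = np.zeros(scores.size, dtype=bool)  # bool replaces the old dtype spelling
    labels[0:len(tp_scores)] = True

    print(scores)  # [5 6 6 7 1 2 2]
    print(labels)  # [ True  True  True  True False False False]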
vtool_ibeis/segmentation.py
CHANGED
|
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, division, print_function
-from six.moves import range, zip # NOQA
 import numpy as np
 import cv2
 import utool as ut
@@ -87,7 +85,7 @@ def demo_grabcut(bgr_img):
        >>> import utool as ut
        >>> import plottool_ibeis as pt
        >>> import vtool_ibeis as vt
-        >>> img_fpath = ut.grab_test_imgpath('
+        >>> img_fpath = ut.grab_test_imgpath('astro')
        >>> bgr_img = vt.imread(img_fpath)
        >>> # execute function
        >>> print(bgr_img.shape)
@@ -142,7 +140,7 @@ def demo_grabcut(bgr_img):
    with ut.Timer('grabcut'):
        post_mask = grabcut(bgr_img, prior_mask)
    if post_mask.dtype == np.uint8:
-        post_mask = post_mask.astype(
+        post_mask = post_mask.astype(float) / 255.0
    seg_chip = mask_colored_img(bgr_img, post_mask, 'bgr')
    print('finished running grabcut')
    pt.imshow(post_mask * 255, pnum=(1, 2, 1))
@@ -188,12 +186,12 @@ from_hsv_flags = {
 
 def mask_colored_img(img_rgb, mask, encoding='bgr'):
    if mask.dtype == np.uint8:
-        mask = mask.astype(
+        mask = mask.astype(float) / 255.0
    into_hsv_flag = into_hsv_flags[encoding]
    from_hsv_flag = from_hsv_flags[encoding]
    # Mask out value component
    img_hsv = cv2.cvtColor(img_rgb, into_hsv_flag)
-    img_hsv = np.array(img_hsv, dtype=
+    img_hsv = np.array(img_hsv, dtype=float) / 255.0
    VAL_INDEX = 2
    img_hsv[:, :, VAL_INDEX] *= mask
    img_hsv = np.array(np.round(img_hsv * 255.0), dtype=np.uint8)
@@ -228,7 +226,7 @@ def grabcut2(rgb_chip):
    chip_mask = np.where(is_forground, 255, 0).astype('uint8')
    # Crop
    chip_mask = clean_mask(chip_mask)
-    chip_mask = np.array(chip_mask,
+    chip_mask = np.array(chip_mask, float) / 255.0
    # Mask value component of HSV space
    seg_chip = mask_colored_img(rgb_chip, chip_mask, 'rgb')
    return seg_chip
@@ -266,10 +264,10 @@ def segment(img_fpath, bbox_, new_size=None):
    chip = img_resz[y1:y2, x1:x2]
    chip_mask = img_mask[y1:y2, x1:x2]
    chip_mask = clean_mask(chip_mask)
-    chip_mask = np.array(chip_mask,
+    chip_mask = np.array(chip_mask, float) / 255.0
    # Mask the value of HSV
    chip_hsv = cv2.cvtColor(chip, cv2.COLOR_RGB2HSV)
-    chip_hsv = np.array(chip_hsv, dtype=
+    chip_hsv = np.array(chip_hsv, dtype=float) / 255.0
    chip_hsv[:, :, 2] *= chip_mask
    chip_hsv = np.array(np.round(chip_hsv * 255.0), dtype=np.uint8)
    seg_chip = cv2.cvtColor(chip_hsv, cv2.COLOR_HSV2RGB)
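Several hunks in segmentation.py repeat one idiom: a uint8 mask or HSV image is rescaled to [0, 1] floats, the value channel is attenuated by the mask, and the result is converted back to uint8. A compact sketch of that round trip (the 8x8 gray test image is made up for illustration; the steps mirror mask_colored_img above):

    import cv2
    import numpy as np

    img_bgr = np.full((8, 8, 3), 200, dtype=np.uint8)
    mask = np.zeros((8, 8), dtype=np.uint8)
    mask[2:6, 2:6] = 255

    if mask.dtype == np.uint8:
        mask = mask.astype(float) / 255.0            # uint8 -> [0, 1]
    img_hsv = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2HSV)
    img_hsv = np.array(img_hsv, dtype=float) / 255.0
    img_hsv[:, :, 2] *= mask                         # darken pixels outside the mask
    img_hsv = np.array(np.round(img_hsv * 255.0), dtype=np.uint8)
    out = cv2.cvtColor(img_hsv, cv2.COLOR_HSV2BGR)
    print(out[0, 0], out[4, 4])  # [0 0 0] outside the mask, [200 200 200] inside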
vtool_ibeis/spatial_verification.py
CHANGED
@@ -267,10 +267,10 @@ def compute_affine(xy1_man, xy2_man):
 
    Example1:
        >>> # ENABLE_DOCTEST
+        >>> # xdoctest: +REQUIRES(module:pyhesaff)
        >>> from vtool_ibeis.spatial_verification import * # NOQA
        >>> import vtool_ibeis.demodata as demodata
        >>> import vtool_ibeis.keypoint as ktool
-        >>> import plottool_ibeis as pt
        >>> xy1_man, xy2_man, rchip1, rchip2, T1, T2 = testdata_matching_affine_inliers_normalized()
        >>> A_prime = compute_affine(xy1_man, xy2_man)
        >>> A = npl.solve(T2, A_prime).dot(T1)
@@ -278,6 +278,7 @@ def compute_affine(xy1_man, xy2_man):
        >>> result = np.array_str(A, precision=2)
        >>> print(result)
        >>> # xdoctest: +REQUIRES(--show)
+        >>> import plottool_ibeis as pt
        >>> rchip2_blendA = pt.draw_sv.get_blended_chip(rchip1, rchip2, A)
        >>> pt.imshow(rchip2_blendA)
        >>> ut.show_if_requested()
@@ -318,6 +319,7 @@ def compute_homog(xy1_mn, xy2_mn):
 
    Example:
        >>> # ENABLE_DOCTEST
+        >>> # xdoctest: +REQUIRES(module:pyhesaff)
        >>> from vtool_ibeis.spatial_verification import * # NOQA
        >>> import vtool_ibeis.keypoint as ktool
        >>> import vtool_ibeis.demodata as demodata
@@ -334,6 +336,7 @@ def compute_homog(xy1_mn, xy2_mn):
 
    Example1:
        >>> # ENABLE_DOCTEST
+        >>> # xdoctest: +REQUIRES(module:pyhesaff)
        >>> from vtool_ibeis.spatial_verification import * # NOQA
        >>> import vtool_ibeis.keypoint as ktool
        >>> xy1_man, xy2_man, rchip1, rchip2, T1, T2 = testdata_matching_affine_inliers_normalized()
@@ -371,8 +374,8 @@ def testdata_matching_affine_inliers():
    ori_thresh = 1.57
    xy_thresh_sqrd = dlen_sqrd2 * xy_thresh
    featkw = ut.argparse_dict(vt.get_extract_features_default_params())
-    fname1 = ut.get_argval('--fname1', type_=str, default='
-    fname2 = ut.get_argval('--fname2', type_=str, default='
+    fname1 = ut.get_argval('--fname1', type_=str, default='tsukuba_l')
+    fname2 = ut.get_argval('--fname2', type_=str, default='tsukuba_r')
    (kpts1, kpts2, fm, fs, rchip1, rchip2) = demodata.testdata_ratio_matches(fname1, fname2, **featkw)
    aff_inliers, aff_errors, Aff = get_best_affine_inliers_(
        kpts1, kpts2, fm, fs, xy_thresh_sqrd, scale_thresh, ori_thresh)
@@ -461,7 +464,7 @@ def _test_hypothesis_inliers(Aff, invVR1s_m, xy2_m, det2_m, ori2_m,
    ori_inliers_flag = np.less(ori_err, ori_thresh)
    #np.logical_and(xy_inliers_flag, scale_inliers_flag)
    # TODO Add uniqueness of matches constraint
-    #hypo_inliers_flag = np.empty(xy_inliers_flag.size, dtype=
+    #hypo_inliers_flag = np.empty(xy_inliers_flag.size, dtype=bool)
    hypo_inliers_flag = xy_inliers_flag  # Try to re-use memory
    np.logical_and(hypo_inliers_flag, ori_inliers_flag, out=hypo_inliers_flag)
    np.logical_and(hypo_inliers_flag, scale_inliers_flag, out=hypo_inliers_flag)
@@ -694,7 +697,6 @@ def test_homog_errors(H, kpts1, kpts2, fm, xy_thresh_sqrd, scale_thresh,
    Example0:
        >>> # DISABLE_DOCTEST
        >>> from vtool_ibeis.spatial_verification import * # NOQA
-        >>> import plottool_ibeis as pt
        >>> kpts1, kpts2, fm, aff_inliers, rchip1, rchip2, xy_thresh_sqrd = testdata_matching_affine_inliers()
        >>> H = estimate_refined_transform(kpts1, kpts2, fm, aff_inliers)
        >>> scale_thresh, ori_thresh = 2.0, 1.57
@@ -702,13 +704,13 @@ def test_homog_errors(H, kpts1, kpts2, fm, xy_thresh_sqrd, scale_thresh,
        >>> homog_tup1 = test_homog_errors(H, kpts1, kpts2, fm, xy_thresh_sqrd, scale_thresh, ori_thresh, full_homog_checks)
        >>> homog_tup = (homog_tup1[0], homog_tup1[2])
        >>> # xdoctest: +REQUIRES(--show)
+        >>> import plottool_ibeis as pt
        >>> pt.draw_sv.show_sv(rchip1, rchip2, kpts1, kpts2, fm, homog_tup=homog_tup)
        >>> ut.show_if_requested()
 
    Example1:
        >>> # DISABLE_DOCTEST
        >>> from vtool_ibeis.spatial_verification import * # NOQA
-        >>> import plottool_ibeis as pt
        >>> kpts1, kpts2, fm_, aff_inliers, rchip1, rchip2, xy_thresh_sqrd = testdata_matching_affine_inliers()
        >>> H = estimate_refined_transform(kpts1, kpts2, fm_, aff_inliers)
        >>> scale_thresh, ori_thresh = 2.0, 1.57
@@ -731,6 +733,7 @@ def test_homog_errors(H, kpts1, kpts2, fm, xy_thresh_sqrd, scale_thresh,
        ...     'kpts1_m, kpts2_m, off_xy1_m, off_xy1_mt, dxy1_m, dxy1_mt, xy2_m, xy1_m, xy1_mt, scale_err, xy_err, ori_err'.split(', '))
        >>> #---------------
        >>> # xdoctest: +REQUIRES(--show)
+        >>> import plottool_ibeis as pt
        >>> pt.figure(fnum=1, pnum=(1, 2, 1), title='orig points and offset point')
        >>> segments_list1 = np.array(list(zip(xy1_m.T.tolist(), off_xy1_m.T.tolist())))
        >>> pt.draw_line_segments(segments_list1, color=pt.LIGHT_BLUE)
@@ -871,11 +874,11 @@ def refine_inliers(kpts1, kpts2, fm, aff_inliers, xy_thresh_sqrd,
        >>> # DISABLE_DOCTEST
        >>> from vtool_ibeis.spatial_verification import * # NOQA
        >>> import vtool_ibeis.keypoint as ktool
-        >>> import plottool_ibeis as pt
        >>> kpts1, kpts2, fm, aff_inliers, rchip1, rchip2, xy_thresh_sqrd = testdata_matching_affine_inliers()
        >>> homog_tup1 = refine_inliers(kpts1, kpts2, fm, aff_inliers, xy_thresh_sqrd)
        >>> homog_tup = (homog_tup1[0], homog_tup1[2])
        >>> # xdoctest: +REQUIRES(--show)
+        >>> import plottool_ibeis as pt
        >>> pt.draw_sv.show_sv(rchip1, rchip2, kpts1, kpts2, fm, homog_tup=homog_tup)
        >>> ut.show_if_requested()
 
@@ -955,8 +958,8 @@ def spatially_verify_kpts(kpts1, kpts2, fm,
        >>> from vtool_ibeis.spatial_verification import *
        >>> import vtool_ibeis.demodata as demodata
        >>> import vtool_ibeis as vt
-        >>> fname1 = ut.get_argval('--fname1', type_=str, default='
-        >>> fname2 = ut.get_argval('--fname2', type_=str, default='
+        >>> fname1 = ut.get_argval('--fname1', type_=str, default='tsukuba_r')
+        >>> fname2 = ut.get_argval('--fname2', type_=str, default='tsukuba_l')
        >>> default_dict = vt.get_extract_features_default_params()
        >>> default_dict['ratio_thresh'] = .625
        >>> kwargs = ut.argparse_dict(default_dict)
@@ -990,8 +993,6 @@ def spatially_verify_kpts(kpts1, kpts2, fm,
        >>> aff_tup = (aff_inliers, Aff)
        >>> pt.draw_sv.show_sv(rchip1, rchip2, kpts1, kpts2, fm, aff_tup=aff_tup, homog_tup=homog_tup, refine_method=refine_method)
        >>> pt.show_if_requested()
-
-    tuple(numpy.ndarray, tuple(numpy.ndarray*3), numpy.ndarray, numpy.ndarray, tuple(numpy.ndarray*3), numpy.ndarray)
    """
    if len(fm) == 0:
        if VERBOSE_SVER:
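Besides the pyhesaff guards, this file consistently moves `import plottool_ibeis as pt` below the `# xdoctest: +REQUIRES(--show)` directive, so the plotting dependency is only imported when a figure is actually requested. A sketch of that docstring pattern (the function name and signature are hypothetical; the plotting calls are the ones shown in the hunks above):

    def show_refined_match(rchip1, rchip2, kpts1, kpts2, fm, homog_tup):
        """
        Example:
            >>> # DISABLE_DOCTEST
            >>> # ... non-plotting setup would run unconditionally here ...
            >>> # xdoctest: +REQUIRES(--show)
            >>> # Everything below is skipped unless --show is passed, so
            >>> # plottool_ibeis never has to be installed just to run the test.
            >>> import plottool_ibeis as pt
            >>> import utool as ut
            >>> pt.draw_sv.show_sv(rchip1, rchip2, kpts1, kpts2, fm, homog_tup=homog_tup)
            >>> ut.show_if_requested()
        """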
vtool_ibeis/symbolic.py
CHANGED
vtool_ibeis/trig.py
CHANGED
vtool_ibeis/util_math.py
CHANGED
|
@@ -1,14 +1,11 @@
-# -*- coding: utf-8 -*-
 """
 # LICENCE Apache 2 or whatever
 
 FIXME: monotization functions need more hueristics
 """
-from __future__ import absolute_import, division, print_function, unicode_literals
 import numpy as np
-import utool as ut
-import ubelt as ub
-from six.moves import range, zip
+import utool as ut # NOQA
+import ubelt as ub # NOQA
 
 
 TAU = np.pi * 2 # References: tauday.com
@@ -484,7 +481,7 @@ def test_language_modulus():
    http://en.wikipedia.org/wiki/Modulo_operation
    """
    import math
-    import utool as ut
+    import utool as ut # NOQA
    TAU = math.pi * 2
    num_list = [-8, -1, 0, 1, 2, 6, 7, 29]
    modop_result_list = []
{vtool_ibeis-2.2.0.dist-info → vtool_ibeis-2.3.0.dist-info}/LICENSE
CHANGED
@@ -1,4 +1,4 @@
-Apache License
+Apache License
 Version 2.0, January 2004
 http://www.apache.org/licenses/
 
@@ -178,7 +178,7 @@ Apache License
    APPENDIX: How to apply the Apache License to your work.
 
    To apply the Apache License to your work, attach the following
-    boilerplate notice, with the fields enclosed by brackets "
+    boilerplate notice, with the fields enclosed by brackets "[]"
    replaced with your own identifying information. (Don't include
    the brackets!) The text should be enclosed in the appropriate
    comment syntax for the file format. We also recommend that a
@@ -186,7 +186,7 @@ Apache License
    same "printed page" as the copyright notice for easier
    identification within third-party archives.
 
-    Copyright
+    Copyright 2022 "Jon Crall"
 
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
@@ -198,4 +198,4 @@ Apache License
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
-    limitations under the License.
+    limitations under the License.