ddi-fw 0.0.170__tar.gz → 0.0.172__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/PKG-INFO +1 -1
  2. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/pyproject.toml +1 -1
  3. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/core.py +40 -27
  4. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw.egg-info/PKG-INFO +1 -1
  5. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/README.md +0 -0
  6. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/setup.cfg +0 -0
  7. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/__init__.py +0 -0
  8. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/dataset_splitter.py +0 -0
  9. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/db_utils.py +0 -0
  10. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/base.py +0 -0
  11. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/data/event.db +0 -0
  12. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/debug.log +0 -0
  13. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes/test_indexes.txt +0 -0
  14. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes/train_fold_0.txt +0 -0
  15. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes/train_fold_1.txt +0 -0
  16. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes/train_fold_2.txt +0 -0
  17. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes/train_fold_3.txt +0 -0
  18. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes/train_fold_4.txt +0 -0
  19. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes/train_indexes.txt +0 -0
  20. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes/validation_fold_0.txt +0 -0
  21. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes/validation_fold_1.txt +0 -0
  22. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes/validation_fold_2.txt +0 -0
  23. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes/validation_fold_3.txt +0 -0
  24. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes/validation_fold_4.txt +0 -0
  25. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes_old/test_indexes.txt +0 -0
  26. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes_old/train_fold_0.txt +0 -0
  27. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes_old/train_fold_1.txt +0 -0
  28. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes_old/train_fold_2.txt +0 -0
  29. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes_old/train_fold_3.txt +0 -0
  30. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes_old/train_fold_4.txt +0 -0
  31. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes_old/train_indexes.txt +0 -0
  32. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes_old/validation_fold_0.txt +0 -0
  33. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes_old/validation_fold_1.txt +0 -0
  34. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes_old/validation_fold_2.txt +0 -0
  35. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes_old/validation_fold_3.txt +0 -0
  36. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/indexes_old/validation_fold_4.txt +0 -0
  37. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl/readme.md +0 -0
  38. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl_text/base.py +0 -0
  39. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl_text/data/event.db +0 -0
  40. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl_text/indexes/test_indexes.txt +0 -0
  41. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl_text/indexes/train_fold_0.txt +0 -0
  42. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl_text/indexes/train_fold_1.txt +0 -0
  43. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl_text/indexes/train_fold_2.txt +0 -0
  44. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl_text/indexes/train_fold_3.txt +0 -0
  45. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl_text/indexes/train_fold_4.txt +0 -0
  46. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl_text/indexes/train_indexes.txt +0 -0
  47. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl_text/indexes/validation_fold_0.txt +0 -0
  48. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl_text/indexes/validation_fold_1.txt +0 -0
  49. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl_text/indexes/validation_fold_2.txt +0 -0
  50. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl_text/indexes/validation_fold_3.txt +0 -0
  51. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/ddi_mdl_text/indexes/validation_fold_4.txt +0 -0
  52. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/__init__.py +0 -0
  53. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/base.py +0 -0
  54. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/df_extraction_cleanxiaoyu50.csv +0 -0
  55. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/drug_information_del_noDDIxiaoyu50.csv +0 -0
  56. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/indexes/test_indexes.txt +0 -0
  57. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/indexes/train_fold_0.txt +0 -0
  58. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/indexes/train_fold_1.txt +0 -0
  59. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/indexes/train_fold_2.txt +0 -0
  60. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/indexes/train_fold_3.txt +0 -0
  61. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/indexes/train_fold_4.txt +0 -0
  62. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/indexes/train_indexes.txt +0 -0
  63. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/indexes/validation_fold_0.txt +0 -0
  64. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/indexes/validation_fold_1.txt +0 -0
  65. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/indexes/validation_fold_2.txt +0 -0
  66. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/indexes/validation_fold_3.txt +0 -0
  67. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/indexes/validation_fold_4.txt +0 -0
  68. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/mdf_sa_ddi/mdf-sa-ddi.zip +0 -0
  69. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/setup_._py +0 -0
  70. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/drugbank/__init__.py +0 -0
  71. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/drugbank/drugbank.xsd +0 -0
  72. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/drugbank/drugbank_parser.py +0 -0
  73. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/drugbank/drugbank_processor.py +0 -0
  74. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/drugbank/drugbank_processor_org.py +0 -0
  75. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/drugbank/event_extractor.py +0 -0
  76. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/langchain/__init__.py +0 -0
  77. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/langchain/embeddings.py +0 -0
  78. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/langchain/sentence_splitter.py +0 -0
  79. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/langchain/storage.py +0 -0
  80. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/ml/__init__.py +0 -0
  81. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/ml/evaluation_helper.py +0 -0
  82. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/ml/ml_helper.py +0 -0
  83. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/ml/model_wrapper.py +0 -0
  84. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/ml/pytorch_wrapper.py +0 -0
  85. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/ml/tensorflow_wrapper.py +0 -0
  86. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/ner/__init__.py +0 -0
  87. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/ner/mmlrestclient.py +0 -0
  88. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/ner/ner.py +0 -0
  89. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/pipeline/__init__.py +0 -0
  90. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/pipeline/multi_modal_combination_strategy.py +0 -0
  91. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/pipeline/multi_pipeline.py +0 -0
  92. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/pipeline/ner_pipeline.py +0 -0
  93. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/pipeline/pipeline.py +0 -0
  94. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/utils/__init__.py +0 -0
  95. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/utils/enums.py +0 -0
  96. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/utils/json_helper.py +0 -0
  97. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/utils/kaggle.py +0 -0
  98. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/utils/package_helper.py +0 -0
  99. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/utils/py7zr_helper.py +0 -0
  100. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/utils/utils.py +0 -0
  101. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/utils/zip_helper.py +0 -0
  102. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/vectorization/__init__.py +0 -0
  103. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/vectorization/feature_vector_generation.py +0 -0
  104. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/vectorization/idf_helper.py +0 -0
  105. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw.egg-info/SOURCES.txt +0 -0
  106. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw.egg-info/dependency_links.txt +0 -0
  107. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw.egg-info/requires.txt +0 -0
  108. {ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw.egg-info/top_level.txt +0 -0
{ddi_fw-0.0.170 → ddi_fw-0.0.172}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddi_fw
-Version: 0.0.170
+Version: 0.0.172
 Summary: Do not use :)
 Author-email: Kıvanç Bayraktar <bayraktarkivanc@gmail.com>
 Maintainer-email: Kıvanç Bayraktar <bayraktarkivanc@gmail.com>
{ddi_fw-0.0.170 → ddi_fw-0.0.172}/pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "ddi_fw"
-version = "0.0.170"
+version = "0.0.172"
 description = "Do not use :)"
 readme = "README.md"
 authors = [
{ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw/datasets/core.py
@@ -1,4 +1,5 @@
 import glob
+import logging
 from typing import Any, Dict, List, Optional, Type
 import numpy as np
 import pandas as pd
@@ -20,7 +21,6 @@ except ImportError:
         "Failed to import langchain.embeddings module. ")
 
 
-
 def stack(df_column):
     return np.stack(df_column.values)
 
@@ -56,25 +56,20 @@ def generate_sim_matrices_new(df, generated_vectors, columns, key_column="id"):
 
 class BaseDataset(BaseModel):
     dataset_name: str
-    index_path: str
+    index_path: Optional[str] = None
     dataset_splitter_type: Type[DatasetSplitter]
     class_column: str = 'class'
     dataframe: Optional[pd.DataFrame] = None
-    X_train: Optional[pd.DataFrame] = None
-    X_test: Optional[pd.DataFrame] = None
-    y_train: Optional[pd.Series] = None
-    y_test: Optional[pd.Series] = None
-    train_indexes: Optional[pd.Index] = None
-    test_indexes: Optional[pd.Index] = None
-    train_idx_arr: List|None = None
-    val_idx_arr: List|None = None
-    # train_idx_arr: Optional[List[np.ndarray]] = None
-    # val_idx_arr: Optional[List[np.ndarray]] = None
+    X_train: Optional[pd.DataFrame | np.ndarray] = None
+    X_test: Optional[pd.DataFrame | np.ndarray] = None
+    y_train: Optional[pd.Series | np.ndarray] = None
+    y_test: Optional[pd.Series | np.ndarray] = None
+    train_indexes: Optional[pd.Index] = None
+    test_indexes: Optional[pd.Index] = None
+    train_idx_arr: Optional[List[np.ndarray]] = None
+    val_idx_arr: Optional[List[np.ndarray]] = None
     columns: List[str] = []
 
-    # feature_process: FeatureProcessor
-    # similarity_matrix_service: SimilarityMatrixService
-
     class Config:
         arbitrary_types_allowed = True
 
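The field changes above widen X_train/X_test/y_train/y_test to accept numpy arrays as well as pandas objects, and make index_path optional, so pre-computed splits can be injected directly. Below is a minimal standalone sketch (not part of the package; the SplitHolder name is illustrative) that mirrors only the changed annotations to show both input kinds validating under pydantic with arbitrary_types_allowed:

from typing import Optional

import numpy as np
import pandas as pd
from pydantic import BaseModel


class SplitHolder(BaseModel):
    # Same union annotations as the updated BaseDataset fields in core.py
    X_train: Optional[pd.DataFrame | np.ndarray] = None
    X_test: Optional[pd.DataFrame | np.ndarray] = None
    y_train: Optional[pd.Series | np.ndarray] = None
    y_test: Optional[pd.Series | np.ndarray] = None

    class Config:
        arbitrary_types_allowed = True


# numpy and pandas inputs both pass validation against the widened types
holder = SplitHolder(
    X_train=np.zeros((4, 2)),
    y_train=np.array([0, 1, 0, 1]),
    X_test=pd.DataFrame({"a": [1.0]}),
    y_test=pd.Series([1]),
)
print(type(holder.X_train).__name__, type(holder.X_test).__name__)  # ndarray DataFrame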
 
@@ -93,7 +88,7 @@ class BaseDataset(BaseModel):
             # items.append([f'{column}_embedding', train_data,
             #               y_train_label, test_data, y_test_label])
         return items
-
+
     @computed_field
     @property
     def dataset_splitter(self) -> DatasetSplitter:
@@ -107,9 +102,22 @@ class BaseDataset(BaseModel):
         pass
 
     def load(self):
+        """
+        Load the dataset. If X_train, y_train, X_test, and y_test are already provided,
+        skip deriving them. Otherwise, derive them from the dataframe and indices.
+        """
+        if self.X_train is not None and self.y_train is not None and self.X_test is not None and self.y_test is not None:
+            # Data is already provided, no need to calculate
+            logging.info(
+                "X_train, y_train, X_test, and y_test are already provided. Skipping calculation.")
+            return self.X_train, self.X_test, self.y_train, self.y_test, self.train_indexes, self.test_indexes, self.train_idx_arr, self.val_idx_arr
+
         if self.index_path is None:
             raise Exception(
-                "There is no index path, please call split function")
+                "There is no index path. Please call split_dataset or provide indices.")
+
+        if self.dataframe is None:
+            raise Exception("There is no dataframe to derive data from.")
 
         try:
             train_idx_all, test_idx_all, train_idx_arr, val_idx_arr = self.__get_indexes__(
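With this guard, load() returns user-supplied splits immediately instead of always reading index files. The sketch below reproduces just that control flow in a standalone toy class (not the ddi_fw API); only the guard order comes from the hunk, everything else is illustrative:

import logging

import numpy as np

logging.basicConfig(level=logging.INFO)


class ToyDataset:
    # Toy stand-in using the same attribute names as BaseDataset.load
    def __init__(self, X_train=None, X_test=None, y_train=None, y_test=None,
                 index_path=None, dataframe=None):
        self.X_train, self.X_test = X_train, X_test
        self.y_train, self.y_test = y_train, y_test
        self.train_indexes = self.test_indexes = None
        self.train_idx_arr = self.val_idx_arr = None
        self.index_path, self.dataframe = index_path, dataframe

    def load(self):
        # Same guard order as the updated load(): short-circuit first, then validate inputs
        if (self.X_train is not None and self.y_train is not None
                and self.X_test is not None and self.y_test is not None):
            logging.info("Splits already provided. Skipping calculation.")
            return (self.X_train, self.X_test, self.y_train, self.y_test,
                    self.train_indexes, self.test_indexes,
                    self.train_idx_arr, self.val_idx_arr)
        if self.index_path is None:
            raise Exception("There is no index path. Please call split_dataset or provide indices.")
        if self.dataframe is None:
            raise Exception("There is no dataframe to derive data from.")
        # ...index-file based derivation omitted in this sketch...


ds = ToyDataset(X_train=np.zeros((2, 3)), y_train=np.array([0, 1]),
                X_test=np.ones((1, 3)), y_test=np.array([1]))
print(ds.load()[0].shape)  # (2, 3) -- returned without reading any index files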
@@ -119,9 +127,6 @@ class BaseDataset(BaseModel):
 
         self.prep()
 
-        if self.dataframe is None:
-            raise Exception("There is no dataframe")
-
         train = self.dataframe[self.dataframe.index.isin(train_idx_all)]
         test = self.dataframe[self.dataframe.index.isin(test_idx_all)]
 
@@ -135,7 +140,7 @@ class BaseDataset(BaseModel):
         self.train_idx_arr = train_idx_arr
         self.val_idx_arr = val_idx_arr
 
-        return self.X_train, self.X_test, self.y_train, self.y_test, self.X_train.index, self.X_test.index, train_idx_arr, val_idx_arr
+        return self.X_train, self.X_test, self.y_train, self.y_test, self.train_indexes, self.test_indexes, self.train_idx_arr, self.val_idx_arr
 
     def __get_indexes__(self, path):
         train_index_path = path+'/train_indexes.txt'
@@ -167,14 +172,21 @@ class BaseDataset(BaseModel):
             f.write('\n'.join(str_indexes))
 
     def split_dataset(self, save_indexes: bool = False):
-        # TODO class type should be parametric
+        """
+        Split the dataset into training and testing sets. This method is only available
+        if a dataframe exists. If X_train, y_train, X_test, and y_test are already present,
+        raise an error.
+        """
+        if self.X_train is not None or self.X_test is not None:
+            raise Exception(
+                "X_train and X_test are already present. Splitting is not allowed.")
+
+        if self.dataframe is None:
+            raise Exception("There is no dataframe to split.")
 
         save_path = self.index_path
         self.prep()
 
-        if self.dataframe is None:
-            raise Exception("There is no data")
-
         X = self.dataframe.drop(self.class_column, axis=1)
         y = self.dataframe[self.class_column]
 
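Together with the load() change, split_dataset() now refuses to run when splits were injected directly, so the two ways of populating X_train/X_test are mutually exclusive. A standalone sketch of the new guard (toy class, not the package API):

import numpy as np


class ToySplitter:
    def __init__(self, X_train=None, X_test=None, dataframe=None):
        self.X_train, self.X_test, self.dataframe = X_train, X_test, dataframe

    def split_dataset(self, save_indexes: bool = False):
        # Guard order mirrors the updated split_dataset in core.py
        if self.X_train is not None or self.X_test is not None:
            raise Exception("X_train and X_test are already present. Splitting is not allowed.")
        if self.dataframe is None:
            raise Exception("There is no dataframe to split.")
        # ...actual splitting logic omitted in this sketch...


try:
    ToySplitter(X_train=np.zeros((2, 2))).split_dataset()
except Exception as err:
    print(err)  # splitting refused because splits already exist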
 
@@ -206,8 +218,9 @@
 
 
 class TextDatasetMixin(BaseDataset):
-    embedding_size: Optional[int] = None
-    embedding_dict: Dict[str, Any] = Field(default_factory=dict, description="Dictionary for embeddings")
+    embedding_size: Optional[int] = None
+    embedding_dict: Dict[str, Any] = Field(
+        default_factory=dict, description="Dictionary for embeddings")
     embeddings_pooling_strategy: PoolingStrategy | None = None
 
     def process_text(self):
{ddi_fw-0.0.170 → ddi_fw-0.0.172}/src/ddi_fw.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddi_fw
-Version: 0.0.170
+Version: 0.0.172
 Summary: Do not use :)
 Author-email: Kıvanç Bayraktar <bayraktarkivanc@gmail.com>
 Maintainer-email: Kıvanç Bayraktar <bayraktarkivanc@gmail.com>