pyprocessors-nameparser 0.5.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyprocessors_nameparser-0.5.3/.dockerignore +9 -0
- pyprocessors_nameparser-0.5.3/.gitignore +20 -0
- pyprocessors_nameparser-0.5.3/Dockerfile +19 -0
- pyprocessors_nameparser-0.5.3/Jenkinsfile +408 -0
- pyprocessors_nameparser-0.5.3/PKG-INFO +66 -0
- pyprocessors_nameparser-0.5.3/README.md +23 -0
- pyprocessors_nameparser-0.5.3/bumpversion.py +41 -0
- pyprocessors_nameparser-0.5.3/pyprocessors_nameparser/__init__.py +2 -0
- pyprocessors_nameparser-0.5.3/pyprocessors_nameparser/name_parser.py +40 -0
- pyprocessors_nameparser-0.5.3/pyproject.toml +83 -0
- pyprocessors_nameparser-0.5.3/setup.py +46 -0
- pyprocessors_nameparser-0.5.3/tests/__init__.py +0 -0
- pyprocessors_nameparser-0.5.3/tests/data/afp_ner_fr-document-test-anon.json +4081 -0
- pyprocessors_nameparser-0.5.3/tests/data/afp_ner_fr-document-test.json +4073 -0
- pyprocessors_nameparser-0.5.3/tests/data/mindspeak_demo-document-test-anon.json +67 -0
- pyprocessors_nameparser-0.5.3/tests/data/mindspeak_demo-document-test.json +65 -0
- pyprocessors_nameparser-0.5.3/tests/data/terrorisme_ner_v2-document-test-anon.json +204 -0
- pyprocessors_nameparser-0.5.3/tests/data/terrorisme_ner_v2-document-test.json +196 -0
- pyprocessors_nameparser-0.5.3/tests/test_nameparser.py +23 -0
- pyprocessors_nameparser-0.5.3/tox.ini +51 -0
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/.idea/
|
|
2
|
+
/.vscode/
|
|
3
|
+
/dist/
|
|
4
|
+
dist
|
|
5
|
+
.git
|
|
6
|
+
.cache
|
|
7
|
+
**/.pytest_cache
|
|
8
|
+
models
|
|
9
|
+
log
|
|
10
|
+
**/__pycache__/
|
|
11
|
+
*.pyc
|
|
12
|
+
**/test-reports
|
|
13
|
+
**/.passwd-pypi
|
|
14
|
+
**/.env
|
|
15
|
+
**/results.xml
|
|
16
|
+
.groovylintrc.json
|
|
17
|
+
.emailNotif
|
|
18
|
+
|
|
19
|
+
# Unit test / coverage reports
|
|
20
|
+
.tox/
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
# Build image for the pyprocessors-nameparser CI: Python 3.8 plus the
# compilers/tools needed to build wheels and run flit/tox inside Jenkins.
FROM python:3.8-slim-buster
# Install prerequisites
RUN apt-get update -y && \
    apt-get install -y \
    patch \
    gcc && \
    apt-get install -y --no-install-recommends \
    g++ \
    git && \
    # Final upgrade + clean
    apt-get update -y && \
    apt-get clean all -y

# Enable Installing packages as root
ENV FLIT_ROOT_INSTALL=1

# Add pyproject.toml + README.md for flit install
ADD pyproject.toml pyproject.toml
ADD README.md README.md
|
|
@@ -0,0 +1,408 @@
|
|
|
1
|
+
// Declarative CI pipeline: analyse the build cause, bump/commit the package
// version, then build/test/publish inside the project's Dockerfile agent.
// Fix: email subjects said "Jenkinzz" instead of "Jenkins".
pipeline {
    environment {
        PATH_HOME = '/home/jenkins'
        TEST_REPORT_DIR = '/root/test-reports'
        PYTHONPYCACHEPREFIX = '/tmp/.pytest_cache'
        PYTHONDONTWRITEBYTECODE = '1'
        JENKINS_UIDGID = '1004:1004'

        MAJOR_VERSION = '0'
        MINOR_VERSION = '5'
    }

    agent none

    triggers {
        upstream(upstreamProjects: 'pymultirole_plugins/' + BRANCH_NAME.replaceAll('/', '%2F'),\
            threshold: hudson.model.Result.SUCCESS)
    }

    stages {
        stage('Catch build termination') {
            agent {
                node {
                    label 'built-in'
                    customWorkspace "${PATH_HOME}/${JOB_NAME}"
                }
            }
            stages {
                stage('Analyse build cause') {
                    steps {
                        script {
                            analyseBuildCause()
                        }
                    }
                }
            }
        }

        stage('Generate new version') {
            when {
                environment name: 'SKIP_JOB', value: '0'
            }

            agent {
                node {
                    label 'built-in'
                    customWorkspace "${PATH_HOME}/${JOB_NAME}"
                }
            }

            stages {
                stage('Add credentials') {
                    steps {
                        script {
                            // Add password file for flit publishing
                            sh "cp ${PATH_HOME}/.passwd-pypi .env"
                        }
                    }
                }

                stage('Commit new version') {
                    steps {
                        script {
                            println("attempt to publish ${JOB_NAME} with version: ${MAJOR_VERSION}.${MINOR_VERSION}.${BUILD_ID}")

                            // push updates of file __init__.py
                            withCredentials([gitUsernamePassword(credentialsId: 'bitbucket-user', gitToolName: 'git-tool')]) {
                                sh 'git pull'
                                sh "echo '\"\"\"Processor based on Nameparser\"\"\"' > pyprocessors_nameparser/__init__.py"
                                sh "echo '__version__ = \"${MAJOR_VERSION}.${MINOR_VERSION}.${BUILD_ID}\"' >> pyprocessors_nameparser/__init__.py"
                                sh 'git commit pyprocessors_nameparser/__init__.py -m "[Jenkins CI] Commit on version files" || echo "No changes to commit"'
                                sh 'git push'
                            }
                        }
                    }
                }
            }
        }

        stage('Build, test and publish') {
            when {
                beforeAgent true
                environment name: 'SKIP_JOB', value: '0'
            }

            agent {
                // dockerfile agent
                // Mounted volume for Junit reports
                // - docker: /root/test-reports
                // - host  : /tmp/_${JOB_NAME}/test-reports
                dockerfile {
                    label 'built-in'
                    customWorkspace "${PATH_HOME}/${JOB_NAME}"
                    filename 'Dockerfile'
                    args "-u root --privileged -v /tmp/_${JOB_NAME}/test-reports:${TEST_REPORT_DIR}"
                }
            }

            stages {
                stage('Install flit & flake8') {
                    steps {
                        // remove any previous tox env
                        sh 'rm -rf .tox'
                        sh 'python -m pip install pip==22.0.3'
                        sh 'pip install --no-cache-dir flit==3.2.0 flake8==3.9.2 flakehell tox'
                        sh 'flit install'
                    }
                }

                stage('Test & lint python code') {
                    steps {
                        // remove any previous results.xml file
                        sh "rm -f ${TEST_REPORT_DIR}/results.xml"
                        sh 'tox'
                    }
                }

                stage('Publish on PyPI') {
                    environment {
                        FLIT_USERNAME = getUserName '.env'
                        FLIT_PASSWORD = getUserPass '.env'
                    }
                    steps {
                        // remove any previous folder dist
                        sh 'rm -rf dist'
                        // create (as root) folder dist
                        sh 'mkdir dist'
                        // pull recent updates of file __init__.py
                        withCredentials([gitUsernamePassword(credentialsId: 'bitbucket-user', gitToolName: 'git-tool')]) {
                            sh 'git config --global pull.rebase false'
                            sh "git config --global --add safe.directory ${WORKSPACE}"
                            sh 'git pull'
                        }
                        // put back owner of .git folder
                        sh "chown -R ${JENKINS_UIDGID} ${WORKSPACE}/.git"
                        // put back owner of pulled file
                        sh "chown ${JENKINS_UIDGID} pyprocessors_nameparser/__init__.py"
                        // get git status
                        sh 'git status'
                        // publish on PyPI only when the committed version matches this build
                        sh '''
                        export COMMIT_VERSION=$( cat pyprocessors_nameparser/__init__.py|grep version|cut -d '"' -f2|tr -s '[:blank:]' )
                        export BUILD_VERSION="${MAJOR_VERSION}"."${MINOR_VERSION}"."${BUILD_ID}"
                        if [ "${COMMIT_VERSION}" = "${BUILD_VERSION}" ] ; then flit publish ; fi
                        '''
                        // remove current folder dist
                        sh 'rm -rf dist'
                        // remove current folder .hypothesis
                        sh 'rm -rf .hypothesis'
                        // remove current folder .tox
                        sh 'rm -rf .tox'
                    }
                }
            }
        }
    }

    post {
        // only triggered when blue or green sign
        success {
            // node is specified here to get an agent
            node('built-in') {
                // keep using customWorkspace to store Junit report
                ws("${PATH_HOME}/${JOB_NAME}") {
                    script {
                        try {
                            sh 'rm -f results.xml'
                            sh "cp /tmp/_${JOB_NAME}/test-reports/results.xml results.xml"
                        } catch (Exception e) {
                            println 'Exception occurred: ' + e.toString()
                        }
                        try {
                            junit 'results.xml'
                        } catch (Exception e) {
                            println 'Exception occurred: ' + e.toString()
                        }
                        if (sendEmailNotif("${PATH_HOME}/${JOB_NAME}", "${BUILD_NUMBER}")) {
                            println 'sending Success Build notification'
                            // Fixed typo: was '[CI - Jenkinzz SUCCESS] '
                            CUSTOM_SUBJECT = '[CI - Jenkins SUCCESS] ' + CUSTOM_SUBJECT
                            emailext(
                                mimeType: 'text/html',
                                subject: CUSTOM_SUBJECT,
                                body: '${DEFAULT_CONTENT}',
                                replyTo: '${DEFAULT_REPLYTO}',
                                to: '${ADMIN_RECIPIENTS}' + ';' + CUSTOM_RECIPIENTS
                            )
                            switchEmailNotif(false, BUILD_NUMBER)
                        } else {
                            println 'preventing Success Build notification'
                        }
                    }
                }
            }
        }
        // triggered when red sign
        failure {
            // node is specified here to get an agent
            node('built-in') {
                // keep using customWorkspace to store Junit report
                ws("${PATH_HOME}/${JOB_NAME}") {
                    script {
                        try {
                            sh 'rm -f results.xml'
                            sh "cp /tmp/_${JOB_NAME}/test-reports/results.xml results.xml"
                        } catch (Exception e) {
                            println 'Exception occurred: ' + e.toString()
                        }
                        try {
                            junit 'results.xml'
                        } catch (Exception e) {
                            println 'Exception occurred: ' + e.toString()
                        }
                        println 'sending Failure Build notification'
                        // Fixed typo: was '[CI - Jenkinzz FAILURE] '
                        CUSTOM_SUBJECT = '[CI - Jenkins FAILURE] ' + CUSTOM_SUBJECT
                        emailext(
                            mimeType: 'text/html',
                            subject: CUSTOM_SUBJECT,
                            body: '${DEFAULT_CONTENT}',
                            replyTo: '${DEFAULT_REPLYTO}',
                            to: '${ADMIN_RECIPIENTS}' + ';' + CUSTOM_RECIPIENTS
                        )
                    }
                }
            }
        }
        // triggered when black sign
        aborted {
            println 'post-declarative message: abort job'
        }
        // trigger every-works
        //always {
        //}
    }
}
|
|
235
|
+
|
|
236
|
+
// Return the FLIT_USERNAME value parsed from the given env-style file.
// Fix: the result variable was un-def'd and leaked into the global
// script binding (not pipeline-safe); it is now a proper local.
def getUserName(path) {
    def userName = sh(
        script: "grep FLIT_USERNAME ${path}|cut -d '=' -f2",
        returnStdout: true
    ).trim()
    return userName
}
|
|
244
|
+
|
|
245
|
+
// Return the FLIT_PASSWORD value parsed from the given env-style file.
// Fix: the result variable was un-def'd and leaked into the global
// script binding (not pipeline-safe); it is now a proper local.
def getUserPass(path) {
    def userPass = sh(
        script: "grep FLIT_PASSWORD ${path}|cut -d '=' -f2",
        returnStdout: true
    ).trim()
    return userPass
}
|
|
253
|
+
|
|
254
|
+
// Create (toggle == true) or clear (toggle == false) the .emailNotif
// marker file that gates email notification. When clearing, the marker
// is only removed if it was written by the current build.
def switchEmailNotif(toggle, build) {
    if (toggle) {
        sh "echo ${build} > .emailNotif"
        return
    }
    if (build == BUILD_NUMBER) {
        sh 'rm -f .emailNotif'
    }
}
|
|
264
|
+
|
|
265
|
+
// Return true when exactly one .emailNotif marker exists under 'path'
// and its content matches the given build number (i.e. this build armed
// the notification). Fix: 'emailNotif' and 'emailContent' were un-def'd
// and leaked into the global script binding; they are now locals.
boolean sendEmailNotif(path, build) {
    def emailNotif = sh(
        script: "find ${path} -name '.emailNotif'|wc -l",
        returnStdout: true
    ).trim()
    def emailContent = ''
    if (emailNotif == '1') {
        emailContent = sh(
            script: "cat ${path}/.emailNotif",
            returnStdout: true
        ).trim()
    }
    return (emailContent == build)
}
|
|
280
|
+
|
|
281
|
+
// Inspect why this build started and set env.SKIP_JOB / notification
// state accordingly. Skips the build when an upstream job is still
// running or when the triggering commit was made by the CI itself.
// NOTE(review): the trigger block watches 'pymultirole_plugins' but this
// list checks 'pyimporters_plugins' — confirm which upstream is intended.
def analyseBuildCause() {
    upstreamProjects = ['pyimporters_plugins']
    boolean upstreamRunning = false
    String jobName
    // iterate over upstreamProjects
    for (upstream_project in upstreamProjects) {
        Jenkins.instance.getItemByFullName(upstream_project).items.each { repository ->
            boolean isRunning = false
            //repository.parent.name: project
            //repository.name: branch
            if ( repository.name == BRANCH_NAME ) {
                // iterate over all jobs of current repository
                repository.allJobs.each { job ->
                    // iterate over all builds of current job
                    job.builds.each { build ->
                        // determine if a build is running or not
                        // (a build with a null result has not finished yet)
                        if ( build.result == (null) ) {
                            jobName = build.parent.parent.name
                            isRunning = true
                        }
                    }
                    if ( isRunning ) {
                        upstreamRunning = true
                    }
                }
            }
        }
    }

    // Catch if build has been triggered by CI Commit
    // returnStatus = true when string not found -> Team commit
    // returnStatus = false when string is found -> CI commit
    // (grep exit code 0 coerces to false, non-zero to true)
    boolean lastCommitIsTeam = sh(
        script: 'git log -1 | grep "\\[Jenkins CI\\]"',
        returnStatus: true
    )

    // Skip build when upstream detected
    if (upstreamRunning) {
        println 'Skipping build because upstream job detected (' + jobName + ')'
        env.SKIP_JOB = '1'
        switchEmailNotif(false, 0)
        currentBuild.result = 'NOT_BUILT'
    }

    // Catch if build has been triggered by User
    boolean isStartedByUser = currentBuild.rawBuild.getCause(hudson.model.Cause$UserIdCause) != null
    if (isStartedByUser && !upstreamRunning) {
        env.SKIP_JOB = '0'
        env.CUSTOM_SUBJECT = JOB_NAME + ' - Manual Build #' + BUILD_NUMBER
        env.CUSTOM_RECIPIENTS = emailextrecipients([[$class: 'RequesterRecipientProvider']])
        switchEmailNotif(true, BUILD_NUMBER)
        println 'Job started by User, proceeding'
    }

    // Catch if build has been triggered by Upstream
    boolean isStartedByUpstream = currentBuild.rawBuild.getCause(hudson.model.Cause$UpstreamCause) != null
    if (isStartedByUpstream && !upstreamRunning) {
        // Count changesets in the upstream build and how many of them are
        // CI-authored; if ALL changes came from CI, skip this build.
        int changeSetCount = 0
        int ciSkipCount = 0
        String upstreamFullJobName = ''
        for (Run upstreamBuild : currentBuild.upstreamBuilds) {
            upstreamFullJobName = upstreamBuild.rawBuild.fullDisplayName
            if (upstreamBuild.changeSets != null) {
                def changeLogSets = upstreamBuild.changeSets
                for (int i = 0; i < changeLogSets.size(); i++) {
                    changeSetCount++
                    def entries = changeLogSets[i].items
                    for (int j = 0; j < entries.length; j++) {
                        def entry = entries[j]
                        if (entry.msg.contains('[Jenkins CI]')) {
                            ciSkipCount++
                        }
                    }
                }
            }
        }
        if (changeSetCount > 0 && changeSetCount == ciSkipCount) {
            env.SKIP_JOB = '1'
            switchEmailNotif(false, 0)
            println 'Job started by Upstream [' + upstreamFullJobName + '], with CI commit, skipping'
            currentBuild.result = 'NOT_BUILT'
        } else {
            env.SKIP_JOB = '0'
            env.CUSTOM_SUBJECT = JOB_NAME + ' - Upstream Build #' + BUILD_NUMBER
            env.CUSTOM_RECIPIENTS = emailextrecipients([[$class:'UpstreamComitterRecipientProvider']])
            switchEmailNotif(true, BUILD_NUMBER)
            println 'Job started by Upstream [' + upstreamFullJobName + '], proceeding'
        }
    }

    // Catch if build has been triggered by User Commit
    boolean isStartedByCommit = currentBuild.rawBuild.getCause(jenkins.branch.BranchEventCause) != null
    if (isStartedByCommit && lastCommitIsTeam && !upstreamRunning) {
        env.SKIP_JOB = '0'
        env.CUSTOM_SUBJECT = JOB_NAME + ' - SCM Build #' + BUILD_NUMBER
        env.CUSTOM_RECIPIENTS = emailextrecipients([[$class: 'DevelopersRecipientProvider'], [$class:'CulpritsRecipientProvider']])
        switchEmailNotif(true, BUILD_NUMBER)
        println 'Job started by User Commit, proceeding'
    }

    // Catch if build has been triggered by cron
    boolean isStartedByCron = currentBuild.rawBuild.getCause(hudson.triggers.TimerTrigger$TimerTriggerCause) != null
    if (isStartedByCron && lastCommitIsTeam && !upstreamRunning) {
        env.SKIP_JOB = '0'
        env.CUSTOM_SUBJECT = JOB_NAME + ' - CRON Build #' + BUILD_NUMBER
        env.CUSTOM_RECIPIENTS = emailextrecipients([[$class: 'DevelopersRecipientProvider'], [$class:'CulpritsRecipientProvider']])
        switchEmailNotif(true, BUILD_NUMBER)
        println 'Job started by Cron, proceeding'
    }

    // Catch if build has been triggered by branch discovery
    boolean isStartedByBranchDiscovery = currentBuild.rawBuild.getCause(jenkins.branch.BranchIndexingCause) != null
    if (isStartedByBranchDiscovery && lastCommitIsTeam && !upstreamRunning) {
        env.SKIP_JOB = '0'
        env.CUSTOM_SUBJECT = JOB_NAME + ' - BranchDiscovery Build #' + BUILD_NUMBER
        env.CUSTOM_RECIPIENTS = emailextrecipients([[$class: 'DevelopersRecipientProvider'], [$class:'CulpritsRecipientProvider']])
        switchEmailNotif(true, BUILD_NUMBER)
        println 'Job started by Branch Discovery, proceeding'
    }

    // Fallback: last commit was CI-authored and no other cause applies.
    if (!lastCommitIsTeam && !upstreamRunning && !isStartedByUser && !isStartedByUpstream) {
        println 'Skipping build because last commit has been done by CI'
        env.SKIP_JOB = '1'
        switchEmailNotif(false, 0)
        //currentBuild.result = 'NOT_BUILT'
    }
}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: pyprocessors-nameparser
|
|
3
|
+
Version: 0.5.3
|
|
4
|
+
Summary: Processor based on Nameparser
|
|
5
|
+
Home-page: https://kairntech.com/
|
|
6
|
+
Author: Olivier Terrier
|
|
7
|
+
Author-email: olivier.terrier@kairntech.com
|
|
8
|
+
Requires-Python: >=3.8
|
|
9
|
+
Description-Content-Type: text/markdown
|
|
10
|
+
Classifier: Intended Audience :: Information Technology
|
|
11
|
+
Classifier: Intended Audience :: Developers
|
|
12
|
+
Classifier: Intended Audience :: System Administrators
|
|
13
|
+
Classifier: Operating System :: OS Independent
|
|
14
|
+
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
|
|
15
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
16
|
+
Classifier: Topic :: Software Development :: Libraries
|
|
17
|
+
Classifier: Topic :: Software Development
|
|
18
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
19
|
+
Classifier: Development Status :: 4 - Beta
|
|
20
|
+
Classifier: Programming Language :: Python :: 3.8
|
|
21
|
+
Requires-Dist: pymultirole_plugins>=0.5.0,<0.6.0
|
|
22
|
+
Requires-Dist: log-with-context
|
|
23
|
+
Requires-Dist: nameparser
|
|
24
|
+
Requires-Dist: flit ; extra == "dev"
|
|
25
|
+
Requires-Dist: pre-commit ; extra == "dev"
|
|
26
|
+
Requires-Dist: bump2version ; extra == "dev"
|
|
27
|
+
Requires-Dist: sphinx ; extra == "docs"
|
|
28
|
+
Requires-Dist: sphinx-rtd-theme ; extra == "docs"
|
|
29
|
+
Requires-Dist: m2r2 ; extra == "docs"
|
|
30
|
+
Requires-Dist: sphinxcontrib.apidoc ; extra == "docs"
|
|
31
|
+
Requires-Dist: jupyter_sphinx ; extra == "docs"
|
|
32
|
+
Requires-Dist: pytest ; extra == "test"
|
|
33
|
+
Requires-Dist: pytest-cov ; extra == "test"
|
|
34
|
+
Requires-Dist: pytest-flake8 ; extra == "test"
|
|
35
|
+
Requires-Dist: pytest-black ; extra == "test"
|
|
36
|
+
Requires-Dist: flake8==3.9.2 ; extra == "test"
|
|
37
|
+
Requires-Dist: tox ; extra == "test"
|
|
38
|
+
Requires-Dist: dirty-equals ; extra == "test"
|
|
39
|
+
Provides-Extra: dev
|
|
40
|
+
Provides-Extra: docs
|
|
41
|
+
Provides-Extra: test
|
|
42
|
+
|
|
43
|
+
## Requirements
|
|
44
|
+
|
|
45
|
+
- Python 3.8+
|
|
46
|
+
- Flit to put Python packages and modules on PyPI
|
|
47
|
+
- Pydantic for the data parts.
|
|
48
|
+
|
|
49
|
+
## Installation
|
|
50
|
+
```
|
|
51
|
+
pip install flit
|
|
52
|
+
pip install pymultirole-plugins
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
## Publish the Python Package to PyPI
|
|
56
|
+
- Increment the version of your package in the `__init__.py` file:
|
|
57
|
+
```
|
|
58
|
+
"""An amazing package!"""
|
|
59
|
+
|
|
60
|
+
__version__ = 'x.y.z'
|
|
61
|
+
```
|
|
62
|
+
- Publish
|
|
63
|
+
```
|
|
64
|
+
flit publish
|
|
65
|
+
```
|
|
66
|
+
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
## Requirements
|
|
2
|
+
|
|
3
|
+
- Python 3.8+
|
|
4
|
+
- Flit to put Python packages and modules on PyPI
|
|
5
|
+
- Pydantic for the data parts.
|
|
6
|
+
|
|
7
|
+
## Installation
|
|
8
|
+
```
|
|
9
|
+
pip install flit
|
|
10
|
+
pip install pymultirole-plugins
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## Publish the Python Package to PyPI
|
|
14
|
+
- Increment the version of your package in the `__init__.py` file:
|
|
15
|
+
```
|
|
16
|
+
"""An amazing package!"""
|
|
17
|
+
|
|
18
|
+
__version__ = 'x.y.z'
|
|
19
|
+
```
|
|
20
|
+
- Publish
|
|
21
|
+
```
|
|
22
|
+
flit publish
|
|
23
|
+
```
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import re
|
|
2
|
+
import sys
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def main(argv):
    """Bump the MAJOR/MINOR version constants in ./Jenkinsfile and widen the
    matching dependency pin in ./pyproject.toml.

    Parameters
    ----------
    argv : list[str]
        Command-line arguments (without the program name). ``argv[0]``,
        case-insensitive, selects the part to bump: "major" or "minor"
        (default "minor").

    Raises
    ------
    SystemExit
        If no version constant matching the requested part is found
        (the original code raised an opaque NameError in that case).
    """
    part = argv[0].lower() if argv else "minor"
    jenkinsfile = Path("./Jenkinsfile")
    pyprojectfile = Path("./pyproject.toml")

    data = jenkinsfile.read_text(encoding="utf-8")
    major = minor = None
    to_replace = by_replace = None
    # splitlines() handles any newline convention; the original split on
    # fin.newlines, which can be None or a tuple after a read.
    for line in data.splitlines():
        if "MAJOR_VERSION" in line:
            match = re.search(r'MAJOR_VERSION\s*=\s*"([0-9]+)"', line)
            if match:
                major = int(match.group(1))
                if part == "major":
                    to_replace = match.group(0)
                    by_replace = f'MAJOR_VERSION = "{major + 1}"'
        if "MINOR_VERSION" in line:
            match = re.search(r'MINOR_VERSION\s*=\s*"([0-9]+)"', line)
            if match:
                minor = int(match.group(1))
                if part == "minor":
                    to_replace = match.group(0)
                    by_replace = f'MINOR_VERSION = "{minor + 1}"'

    if to_replace is None or major is None or minor is None:
        raise SystemExit(
            f"could not find version constants to bump ('{part}') in {jenkinsfile}"
        )
    jenkinsfile.write_text(data.replace(to_replace, by_replace), encoding="utf-8")

    # Widen the dependency range in pyproject.toml to the next minor window,
    # e.g. "0.5.0,<0.6.0" -> "0.6.0,<0.7.0".
    depend_string = f"{major}.{minor}.0,<{major}.{minor + 1}.0"
    new_depend_string = f"{major}.{minor + 1}.0,<{major}.{minor + 2}.0"
    pydata = pyprojectfile.read_text(encoding="utf-8")
    pyprojectfile.write_text(
        pydata.replace(depend_string, new_depend_string), encoding="utf-8"
    )
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
# Script entry point: the optional first CLI argument selects which version
# part to bump ("major" or "minor"; main() defaults to "minor").
if __name__ == "__main__":
    main(sys.argv[1:])
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
from logging import Logger
|
|
2
|
+
from typing import List, Type, cast
|
|
3
|
+
from log_with_context import add_logging_context
|
|
4
|
+
from nameparser import HumanName
|
|
5
|
+
from pydantic import Field, BaseModel
|
|
6
|
+
from pymultirole_plugins.v1.processor import ProcessorParameters, ProcessorBase
|
|
7
|
+
from pymultirole_plugins.v1.schema import Document
|
|
8
|
+
|
|
9
|
+
import logging

# Use the logging manager so this logger participates in the standard
# hierarchy and handler configuration; instantiating Logger("...") directly
# creates an orphan logger that bypasses logging.getLogger()'s registry.
logger = logging.getLogger("pymultirole")
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class NameParserParameters(ProcessorParameters):
    """Parameters accepted by the NameParser processor."""

    # Annotation labels whose text should be parsed as a person name.
    # Defaults to None (no labels selected). extra="label" presumably marks
    # this field as a label selector in the host UI — TODO confirm against
    # pymultirole's parameter rendering.
    name_labels: List[str] = Field(None, description="List of labels to analyze", extra="label")
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class NameParserProcessor(ProcessorBase):
    __doc__ = """NameParser based on [Nameparser](https://github.com/derek73/python-nameparser)."""

    def process(
        self, documents: List[Document], parameters: ProcessorParameters
    ) -> List[Document]:
        """Parse annotations of the configured labels as human names.

        For each annotation whose ``labelName`` appears in
        ``parameters.name_labels``, the annotation text (or, if absent, the
        covered span of the document text) is parsed with
        ``nameparser.HumanName`` and the parsed components are merged into
        the annotation's ``properties`` dict.

        Returns the same list of documents, mutated in place.
        """
        params: NameParserParameters = cast(NameParserParameters, parameters)
        # name_labels defaults to None; treat that as "no labels" instead of
        # raising TypeError on the membership test below. A set gives O(1)
        # lookups per annotation.
        name_labels = set(params.name_labels or [])
        # The original wrapped this loop in `except BaseException as err:
        # raise err`, which only obscured tracebacks; exceptions now
        # propagate naturally.
        for document in documents:
            with add_logging_context(docid=document.identifier):
                for a in document.annotations or []:
                    if a.labelName in name_labels:
                        # Prefer the annotation's own text; fall back to
                        # slicing the document text by offsets.
                        atext = a.text or document.text[a.start:a.end]
                        name = HumanName(atext)
                        props = a.properties or {}
                        props.update(name.as_dict())
                        a.properties = props
        return documents

    @classmethod
    def get_model(cls) -> Type[BaseModel]:
        """Return the pydantic parameter model for this processor."""
        return NameParserParameters
|