/**
 * Importer service
 *
 * The importer service handles the import of tabular, comma-delimited and Excel-based
 * files.
 *
 * @package importer
 * @author t.w.abma@umcutrecht.nl
 * @since 20100126
 *
 * Revision information:
 * $Rev: 1492 $
 * $Author: t.w.abma@umcutrecht.nl $
 * $Date: 2011-02-04 10:16:53 +0000 (vr, 04 feb 2011) $
 */
package dbnp.importer
import org.dbnp.gdt.*
import org.apache.poi.ss.usermodel.*
import dbnp.studycapturing.*

class ImporterService {
    def authenticationService

    boolean transactional = true

    /**
     * @param is input stream representing the (workbook) resource
     * @return high level representation of the workbook
     */
    Workbook getWorkbook(InputStream is) {
        WorkbookFactory.create(is)
    }
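
    /*
     * Example usage (a minimal sketch, not part of the service itself; the file name
     * "study_import.xls" is only an assumption for illustration, and in a Grails
     * artefact the service would normally be injected rather than instantiated):
     *
     *   def importerService = new ImporterService()
     *   new File("study_import.xls").withInputStream { stream ->
     *       def workbook = importerService.getWorkbook(stream)
     *       println "Sheets in workbook: ${workbook.numberOfSheets}"
     *   }
     */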

    /**
     * Builds a header representation of the columns in a sheet, guessing the field type of
     * every column from the first row of the data matrix.
     *
     * @param wb high level representation of the workbook
     * @param sheetindex sheet to use within the workbook
     * @param headerrow 1-based row number (relative to the first row of the sheet) containing the column headers
     * @param datamatrix_start 0-based row index at which the actual data matrix starts
     * @param theEntity entity (class) to assign to every mapping column (optional)
     * @return header representation as a MappingColumn hashmap, keyed by column index
     */
    def getHeader(Workbook wb, int sheetindex, int headerrow, int datamatrix_start, theEntity = null) {
        def sheet = wb.getSheetAt(sheetindex)
        def sheetrow = sheet.getRow(datamatrix_start)
        //def header = []
        def header = [:]
        def df = new DataFormatter()
        def property = new String()

        //for (Cell c: sheet.getRow(datamatrix_start)) {

        (0..sheetrow.getLastCellNum() - 1).each { columnindex ->

            //def index = c.getColumnIndex()
            def datamatrix_celltype = sheet.getRow(datamatrix_start).getCell(columnindex, Row.CREATE_NULL_AS_BLANK).getCellType()
            def datamatrix_celldata = df.formatCellValue(sheet.getRow(datamatrix_start).getCell(columnindex))
            def datamatrix_cell = sheet.getRow(datamatrix_start).getCell(columnindex)
            def headercell = sheet.getRow(headerrow - 1 + sheet.getFirstRowNum()).getCell(columnindex)
            def tft = TemplateFieldType.STRING //default templatefield type

            // Inspect every cell type; the branches below are currently largely identical,
            // but they may later hold cell-type-specific code such as custom formatting

            switch (datamatrix_celltype) {
                case Cell.CELL_TYPE_STRING:
                    //parse cell value as double
                    def doubleBoolean = true
                    def fieldtype = TemplateFieldType.STRING

                    // is this string perhaps a double?
                    try {
                        formatValue(datamatrix_celldata, TemplateFieldType.DOUBLE)
                    } catch (NumberFormatException nfe) { doubleBoolean = false }
                    finally {
                        if (doubleBoolean) fieldtype = TemplateFieldType.DOUBLE
                    }

                    header[columnindex] = new dbnp.importer.MappingColumn(name: df.formatCellValue(headercell),
                            templatefieldtype: fieldtype,
                            index: columnindex,
                            entity: theEntity,
                            property: property)

                    break
                case Cell.CELL_TYPE_NUMERIC:
                    def fieldtype = TemplateFieldType.LONG
                    def doubleBoolean = true
                    def longBoolean = true

                    // is this cell really an integer?
                    try {
                        Long.valueOf(datamatrix_celldata)
                    } catch (NumberFormatException nfe) { longBoolean = false }
                    finally {
                        if (longBoolean) fieldtype = TemplateFieldType.LONG
                    }

                    // it's not a long, perhaps a double?
                    if (!longBoolean)
                        try {
                            formatValue(datamatrix_celldata, TemplateFieldType.DOUBLE)
                        } catch (NumberFormatException nfe) { doubleBoolean = false }
                        finally {
                            if (doubleBoolean) fieldtype = TemplateFieldType.DOUBLE
                        }

                    if (DateUtil.isCellDateFormatted(datamatrix_cell)) fieldtype = TemplateFieldType.DATE

                    header[columnindex] = new dbnp.importer.MappingColumn(name: df.formatCellValue(headercell),
                            templatefieldtype: fieldtype,
                            index: columnindex,
                            entity: theEntity,
                            property: property)
                    break
                case Cell.CELL_TYPE_BLANK:
                    header[columnindex] = new dbnp.importer.MappingColumn(name: df.formatCellValue(headercell),
                            templatefieldtype: TemplateFieldType.STRING,
                            index: columnindex,
                            entity: theEntity,
                            property: property)
                    break
                default:
                    header[columnindex] = new dbnp.importer.MappingColumn(name: df.formatCellValue(headercell),
                            templatefieldtype: TemplateFieldType.STRING,
                            index: columnindex,
                            entity: theEntity,
                            property: property)
                    break
            } // end of switch
        } // end of cell loop
        return header
    }
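
    /*
     * Example usage (a minimal sketch; the sheet index, header row, data start row and
     * the Sample entity are assumptions that depend on the uploaded file and wizard state):
     *
     *   def header = importerService.getHeader(workbook, 0, 1, 1, Sample)
     *   header.each { columnindex, mappingcolumn ->
     *       println "column ${columnindex}: '${mappingcolumn.name}' guessed as ${mappingcolumn.templatefieldtype}"
     *   }
     */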

    /**
     * Returns a matrix of the rows and columns used in the preview.
     *
     * @param wb workbook object
     * @param header header representation (MappingColumn hashmap) as returned by getHeader
     * @param sheetindex sheet index used
     * @param datamatrix_start row index at which the data matrix starts
     * @param count upper bound for the row range to return (capped at the sheet's last row number)
     * @return two dimensional array (matrix) of Cell objects
     */
    Object[][] getDatamatrix(Workbook wb, header, int sheetindex, int datamatrix_start, int count) {
        def sheet = wb.getSheetAt(sheetindex)
        def rows = []
        def df = new DataFormatter()

        count = (count < sheet.getLastRowNum()) ? count : sheet.getLastRowNum()

        // walk through all rows
        ((datamatrix_start + sheet.getFirstRowNum())..count).each { rowindex ->
            def row = []

            (0..header.size() - 1).each { columnindex ->
                if (sheet.getRow(rowindex))
                    row.add( sheet.getRow(rowindex).getCell(columnindex, Row.CREATE_NULL_AS_BLANK) )
            }

            rows.add(row)
        }

        return rows
    }
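
    /*
     * Example usage (a minimal sketch; the ten-row preview is an arbitrary choice):
     *
     *   def header = importerService.getHeader(workbook, 0, 1, 1)
     *   def datamatrix = importerService.getDatamatrix(workbook, header, 0, 1, 10)
     *   datamatrix.each { row ->
     *       println row.collect { cell -> new DataFormatter().formatCellValue(cell) }.join("\t")
     *   }
     */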

    /**
     * This method will move a file to a new location.
     *
     * @param file File object to move
     * @param folderpath folder to move the file to
     * @param filename (new) filename to give
     * @return the new path and filename if the file has been moved successfully, otherwise an empty string
     */
    def moveFile(File file, String folderpath, String filename) {
        try {
            def rnd = ""; //System.currentTimeMillis()
            file.transferTo(new File(folderpath, rnd + filename))
            return folderpath + filename
        } catch (Exception exception) {
            log.error "File move error, ${exception}"
            return ""
        }
    }

    /**
     * @return pseudo-random numeric value based on the current time and the free memory
     */
    def random = {
        return System.currentTimeMillis() + Runtime.runtime.freeMemory()
    }

    /**
     * Reads data from a workbook and turns it into a two dimensional array of records.
     *
     * @param template_id template identifier to use fields from
     * @param wb POI workbook object
     * @param sheetindex sheet to use when using multiple sheets
     * @param rowindex first row to start with reading the actual data (NOT the header)
     * @param mcmap linked hashmap (preserved order) of MappingColumns
     * @return list containing the two dimensional record table and the list of failed cells
     *
     * @see dbnp.importer.MappingColumn
     */
    def importData(template_id, Workbook wb, int sheetindex, int rowindex, mcmap) {
        def sheet = wb.getSheetAt(sheetindex)
        def template = Template.get(template_id)
        def table = []
        def failedcells = [] // list of records

        // walk through all rows and fill the table with records
        (rowindex..sheet.getLastRowNum()).each { i ->
            // Create an entity record based on a row read from Excel and store the cells which failed to be mapped
            def (record, failed) = createRecord(template, sheet.getRow(i), mcmap)

            // Add record with entity and its values to the table
            table.add(record)

            // If failed cells have been found, add them to the failed cells list
            if (failed?.importcells?.size() > 0) failedcells.add(failed)
        }

        return [table, failedcells]
    }
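
    /*
     * Example usage (a minimal sketch; the template id, workbook, sheet/row indices and
     * the column mapping mcmap are assumed to come from earlier importer wizard steps):
     *
     *   def (table, failedcells) = importerService.importData(templateId, workbook, 0, 1, mcmap)
     *   println "Read ${table.size()} records, ${failedcells.size()} records contain failed cells"
     */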

    /** Method to put failed cells back into the datamatrix. Failed cells are cell values
     * which could not be stored in an entity (e.g. "Humu Supiuns" in an ontology field).
     * Empty corrections should not be stored.
     *
     * @param datamatrix two dimensional array containing entities and possibly also failed cells
     * @param failedcells list with maps of failed cells in [mappingcolumn, cell] format
     * @param correctedcells map of corrected cells in [cell identifier, value] format
     * */
    def saveCorrectedCells(datamatrix, failedcells, correctedcells) {

        // Loop through all failed cells (stored as [record hashcode, ImportRecord] entries)
        failedcells.each { record ->
            record.value.importcells.each { cell ->

                // Get the corrected value
                def correctedvalue = correctedcells.find { it.key.toInteger() == cell.getIdentifier()}.value

                // Find the record in the table which the mappingcolumn belongs to
                def tablerecord = datamatrix.find { it.hashCode() == record.key }

                // Loop through all entities in the record and correct them if necessary
                tablerecord.each { rec ->
                    rec.each { entity ->
                        try {
                            // Update the entity field
                            entity.setFieldValue(cell.mappingcolumn.property, correctedvalue)
                            //log.info "Adjusted " + cell.mappingcolumn.property + " to " + correctedvalue
                        }
                        catch (Exception e) {
                            //log.info "Could not map corrected ontology: " + cell.mappingcolumn.property + " to " + correctedvalue
                        }
                    }
                } // end of table record
            } // end of cell record
        } // end of failedlist
    }
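
    /*
     * Example usage (a minimal sketch; the corrected values normally come from the importer
     * wizard form, keyed by the identifier of the failed ImportCell as a string):
     *
     *   def correctedcells = ["12345": "Homo sapiens"]
     *   importerService.saveCorrectedCells(datamatrix, failedcells, correctedcells)
     */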

    /**
     * Method to store a matrix containing the entities in a record-like structure. Every row in the table
     * contains one or more entity objects (which contain fields with values). So actually a row represents
     * a record with fields from one or more different entities.
     *
     * @param study entity Study
     * @param datamatrix two dimensional array containing entities with values read from Excel file
     * @param authenticationService authentication service used to determine the logged in user
     * @param log logger to write progress and errors to
     */
    static saveDatamatrix(Study study, datamatrix, authenticationService, log) {
        def validatedSuccesfully = 0
        def entitystored = null

        // Study passed? Sync data
        if (study != null) study.refresh()

        // go through the data matrix, read every record, validate the entity and try to persist it
        datamatrix.each { record ->
            record.each { entity ->
                switch (entity.getClass()) {
                    case Study: log.info ".importer wizard, persisting Study `" + entity + "`: "
                        entity.owner = authenticationService.getLoggedInUser()

                        if (study.validate()) {
                            if (!entity.save(flush:true)) {
                                log.error ".importer wizard, study could not be saved: " + entity
                                throw new Exception('.importer wizard, study could not be saved: ' + entity)
                            }
                        } else {
                            log.error ".importer wizard, study could not be validated: " + entity
                            throw new Exception('.importer wizard, study could not be validated: ' + entity)
                        }

                        break
                    case Subject: log.info ".importer wizard, persisting Subject `" + entity + "`: "

                        // is the current entity not already in the database?
                        //entitystored = isEntityStored(entity)

                        // this entity is new, so add it to the study
                        //if (entitystored==null)

                        study.addToSubjects(entity)
                        println "subject persisting" + entity.dump()

                        break
                    case Event: log.info ".importer wizard, persisting Event `" + entity + "`: "
                        study.addToEvents(entity)
                        break
                    case Sample: log.info ".importer wizard, persisting Sample `" + entity + "`: "

                        // is this sample validatable (sample name unique for example?)
                        study.addToSamples(entity)

                        break
                    case SamplingEvent: log.info ".importer wizard, persisting SamplingEvent `" + entity + "`: "
                        study.addToSamplingEvents(entity)
                        break
                    default: log.info ".importer wizard, skipping persisting of `" + entity.getClass() + "`"
                        break
                } // end switch
            } // end record
        } // end datamatrix

        // validate study
        if (study.validate()) {
            if (!study.save(flush: true)) {
                //this.appendErrors(flow.study, flash.wizardErrors)
                throw new Exception('.importer wizard [saveDatamatrix] error while saving study')
            }
        } else {
            throw new Exception('.importer wizard [saveDatamatrix] study does not validate')
        }

        //persistEntity(study)

        //return [validatedSuccesfully, updatedentities, failedtopersist]
        //return [0,0,0]
        return true
    }
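
    /*
     * Example usage (a minimal sketch; in practice the importer wizard calls this static
     * method after the user has confirmed the imported data, passing the table produced
     * by importData):
     *
     *   ImporterService.saveDatamatrix(study, table, authenticationService, log)
     */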

    /**
     * Check whether an entity already exists. A unique field in the entity is
     * used to check whether the instantiated entity (read from Excel) is new.
     * If the entity is found in the database it will be returned as is.
     *
     * @param entity entity object like a Study, Subject, Sample et cetera
     * @return entity if found, otherwise null
     */
    def isEntityStored(entity) {
        switch (entity.getClass()) {
            case Study: return Study.findByCode(entity.code)
                break
            case Subject: return Subject.findByParentAndName(entity.parent, entity.name)
                break
            case Event: break
            case Sample:
                break
            case SamplingEvent: break
            default: // unknown entity
                return null
        }
    }

    /**
     * Find the entity and update the fields. The entity is an instance
     * read from Excel. This method looks in the database for the entity
     * having the same identifier. If it finds the same entity
     * in the database, it will update that record.
     *
     * @param entitystored existing record in the database to update
     * @param entity entity read from Excel
     */
    def updateEntity(entitystored, entity) {
        switch (entity.getClass()) {
            case Study: break
            case Subject: entitystored.properties = entity.properties
                entitystored.save()
                break
            case Event: break
            case Sample: break
            case SamplingEvent: break
            default: // unknown entity
                return null
        }
    }

    /**
     * Method to persist entities into the database
     * Checks whether entity already exists (based on identifier column 'name')
     *
     * @param entity entity object like Study, Subject, Protocol et cetera
     *
     */
    boolean persistEntity(entity) {
        /*log.info ".import wizard persisting ${entity}"

        try {
            entity.save(flush: true)
            return true

        } catch (Exception e) {
            def session = sessionFactory.currentSession
            session.setFlushMode(org.hibernate.FlushMode.MANUAL)
            log.error ".import wizard, failed to save entity:\n" + org.apache.commons.lang.exception.ExceptionUtils.getRootCauseMessage(e)
        }

        return true*/
        //println "persistEntity"
    }

    /**
     * This method creates a record (array) containing entities with values
     *
     * @param template Template object to set on the created entities
     * @param excelrow POI based Excel row containing the cells
     * @param mcmap map containing MappingColumn objects
     * @return the record (list of entities) and an ImportRecord holding the cells that failed
     */
    def createRecord(template, Row excelrow, mcmap) {
        def df = new DataFormatter()
        def tft = TemplateFieldType
        def record = [] // list of entities and the read values
        def failed = new ImportRecord() // map with entity identifier and failed mappingcolumn

        // Initialize all possible entities with the chosen template
        def study = new Study(template: template)
        def subject = new Subject(template: template)
        def samplingEvent = new SamplingEvent(template: template)
        def event = new Event(template: template)
        def sample = new Sample(template: template)

        // Go through the Excel row cell by cell
        for (Cell cell: excelrow) {
            // get the MappingColumn information of the current cell
            def mc = mcmap[cell.getColumnIndex()]
            def value

            // Check if the column must be imported
            if (mc != null && !mc.dontimport) {
                try {
                    value = formatValue(df.formatCellValue(cell), mc.templatefieldtype)
                } catch (NumberFormatException nfe) {
                    value = ""
                }

                try {
                    // which entity does the current cell (field) belong to?
                    switch (mc.entity) {
                        case Study: // does the entity already exist in the record? If not make it so.
                            (record.any {it.getClass() == mc.entity}) ? 0 : record.add(study)
                            study.setFieldValue(mc.property, value)
                            break
                        case Subject: (record.any {it.getClass() == mc.entity}) ? 0 : record.add(subject)
                            subject.setFieldValue(mc.property, value)
                            break
                        case SamplingEvent: (record.any {it.getClass() == mc.entity}) ? 0 : record.add(samplingEvent)
                            samplingEvent.setFieldValue(mc.property, value)
                            break
                        case Event: (record.any {it.getClass() == mc.entity}) ? 0 : record.add(event)
                            event.setFieldValue(mc.property, value)
                            break
                        case Sample: (record.any {it.getClass() == mc.entity}) ? 0 : record.add(sample)
                            sample.setFieldValue(mc.property, value)
                            break
                        case Object: // don't import
                            break
                    } // end switch
                } catch (Exception iae) {
                    log.error ".import wizard error could not set property `" + mc.property + "` to value `" + value + "`"
                    // store the mapping column and value which failed
                    def identifier

                    switch (mc.entity) {
                        case Study: identifier = "entity_" + study.getIdentifier() + "_" + mc.property
                            break
                        case Subject: identifier = "entity_" + subject.getIdentifier() + "_" + mc.property
                            break
                        case SamplingEvent: identifier = "entity_" + samplingEvent.getIdentifier() + "_" + mc.property
                            break
                        case Event: identifier = "entity_" + event.getIdentifier() + "_" + mc.property
                            break
                        case Sample: identifier = "entity_" + sample.getIdentifier() + "_" + mc.property
                            break
                        case Object: // don't import
                            break
                    }

                    def mcInstance = new MappingColumn()
                    mcInstance.properties = mc.properties
                    failed.addToImportcells(new ImportCell(mappingcolumn: mcInstance, value: value, entityidentifier: identifier))
                }
            } // end if
        } // end for
        // a failed column means that entity.setFieldValue() threw an exception
        return [record, failed]
    }

    /**
     * Method to parse a value according to a specific TemplateFieldType
     *
     * @param value string containing the value
     * @param type TemplateFieldType to convert the value to
     * @return object corresponding to the TemplateFieldType
     */
    def formatValue(String value, TemplateFieldType type) throws NumberFormatException {
        switch (type) {
            case TemplateFieldType.STRING: return value.trim()
            case TemplateFieldType.TEXT: return value.trim()
            case TemplateFieldType.LONG: return (long) Double.valueOf(value)
            //case TemplateFieldType.FLOAT : return Float.valueOf(value.replace(",","."));
            case TemplateFieldType.DOUBLE: return Double.valueOf(value.replace(",", "."))
            case TemplateFieldType.STRINGLIST: return value.trim()
            case TemplateFieldType.ONTOLOGYTERM: return value.trim()
            case TemplateFieldType.DATE: return value
            default: return value
        }
    }
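
    /*
     * Example usage (a minimal sketch):
     *
     *   formatValue("1,5", TemplateFieldType.DOUBLE)    // returns 1.5 (comma treated as decimal separator)
     *   formatValue("42", TemplateFieldType.LONG)       // returns 42L
     *   formatValue(" text ", TemplateFieldType.STRING) // returns "text"
     */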

    // methods for fuzzy string matching
    // <FUZZY MATCHING>

    static def similarity(l_seq, r_seq, degree = 2) {
        def l_histo = countNgramFrequency(l_seq, degree)
        def r_histo = countNgramFrequency(r_seq, degree)

        dotProduct(l_histo, r_histo) /
                Math.sqrt(dotProduct(l_histo, l_histo) *
                        dotProduct(r_histo, r_histo))
    }

    static def countNgramFrequency(sequence, degree) {
        def histo = [:]
        def items = sequence.size()

        for (int i = 0; i + degree <= items; i++) {
            def gram = sequence[i..<(i + degree)]
            histo[gram] = 1 + histo.get(gram, 0)
        }
        histo
    }

    static def dotProduct(l_histo, r_histo) {
        def sum = 0
        l_histo.each { key, value ->
            sum = sum + l_histo[key] * r_histo.get(key, 0)
        }
        sum
    }

    static def stringSimilarity(l_str, r_str, degree = 2) {

        similarity(l_str.toString().toLowerCase().toCharArray(),
                r_str.toString().toLowerCase().toCharArray(),
                degree)
    }

    static def mostSimilar(pattern, candidates, threshold = 0) {
        def topScore = 0
        def bestFit = null

        candidates.each { candidate ->
            def score = stringSimilarity(pattern, candidate)
            if (score > topScore) {
                topScore = score
                bestFit = candidate
            }
        }

        if (topScore < threshold)
            bestFit = null

        bestFit
    }
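
    /*
     * Example usage of the fuzzy matching helpers (a minimal sketch). The methods compute a
     * cosine similarity over character n-gram frequencies (bigrams by default), which can be
     * used to suggest the candidate that best matches a column header:
     *
     *   ImporterService.stringSimilarity("Species", "species")              // 1.0 (case-insensitive)
     *   ImporterService.mostSimilar("specis", ["species", "age", "gender"]) // "species"
     */
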
    // </FUZZY MATCHING>

}