/*
 * Copyright 2019 Anyware Services
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ametys.odf.skill;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.avalon.framework.component.Component;
import org.apache.avalon.framework.service.ServiceException;
import org.apache.avalon.framework.service.ServiceManager;
import org.apache.avalon.framework.service.Serviceable;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.supercsv.io.CsvListReader;
import org.supercsv.prefs.CsvPreference;

import org.ametys.cms.ObservationConstants;
import org.ametys.cms.data.ContentValue;
import org.ametys.cms.indexing.solr.SolrIndexHelper;
import org.ametys.cms.repository.Content;
import org.ametys.cms.repository.ContentQueryHelper;
import org.ametys.cms.repository.ContentTypeExpression;
import org.ametys.cms.repository.ModifiableContent;
import org.ametys.cms.repository.WorkflowAwareContent;
import org.ametys.cms.workflow.AbstractContentWorkflowComponent;
import org.ametys.cms.workflow.ContentWorkflowHelper;
import org.ametys.core.observation.ObservationManager;
import org.ametys.odf.ODFHelper;
import org.ametys.odf.ProgramItem;
import org.ametys.odf.course.Course;
import org.ametys.odf.enumeration.OdfReferenceTableEntry;
import org.ametys.odf.program.AbstractProgram;
import org.ametys.plugins.repository.AmetysObjectIterable;
import org.ametys.plugins.repository.AmetysObjectIterator;
import org.ametys.plugins.repository.AmetysObjectResolver;
import org.ametys.plugins.repository.AmetysRepositoryException;
import org.ametys.plugins.repository.data.holder.group.impl.ModifiableModelAwareRepeater;
import org.ametys.plugins.repository.data.holder.group.impl.ModifiableModelAwareRepeaterEntry;
import org.ametys.plugins.repository.query.expression.AndExpression;
import org.ametys.plugins.repository.query.expression.Expression.Operator;
import org.ametys.plugins.repository.query.expression.StringExpression;
import org.ametys.runtime.plugin.component.AbstractLogEnabled;

import com.opensymphony.workflow.WorkflowException;

/**
 * ODF skills helper
 */
public class ODFSkillsHelper extends AbstractLogEnabled implements Serviceable, Component
{
    /** The avalon role. */
    public static final String ROLE = ODFSkillsHelper.class.getName();
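
    // Illustrative sketch (not part of the original class): this helper is an Avalon
    // component, so callers typically obtain it through their ServiceManager, the same
    // way this class looks up its own dependencies in service(). The "manager" variable
    // below is assumed to be such a ServiceManager.
    //   ODFSkillsHelper odfSkillsHelper = (ODFSkillsHelper) manager.lookup(ODFSkillsHelper.ROLE);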

    /** The name of repeater for acquired skills */
    public static final String COURSES_REPEATER_ACQUIRED_SKILLS = "acquiredSkills";

    /** The skill content type id */
    public static final String SKILL_CONTENT_TYPE = "odf-enumeration.Skill";

    /** The attribute name of the skills */
    public static final String SKILLS_ATTRIBUTE_NAME = "skills";

    /** The attribute name of the skill other names */
    public static final String SKILL_OTHER_NAMES_ATTRIBUTE_NAME = "otherNames";

    /** The content workflow helper */
    protected ContentWorkflowHelper _contentWorkflowHelper;

    /** The ametys object resolver */
    protected AmetysObjectResolver _resolver;

    /** The ODF helper */
    protected ODFHelper _odfHelper;

    /** The observation manager */
    protected ObservationManager _observationManager;

    private SolrIndexHelper _solrIndexHelper;

    public void service(ServiceManager manager) throws ServiceException
    {
        _contentWorkflowHelper = (ContentWorkflowHelper) manager.lookup(ContentWorkflowHelper.ROLE);
        _resolver = (AmetysObjectResolver) manager.lookup(AmetysObjectResolver.ROLE);
        _odfHelper = (ODFHelper) manager.lookup(ODFHelper.ROLE);
        _observationManager = (ObservationManager) manager.lookup(ObservationManager.ROLE);
        _solrIndexHelper = (SolrIndexHelper) manager.lookup(SolrIndexHelper.ROLE);
    }

    /**
     * Get the computed skill values for a given {@link AbstractProgram}
     * @param abstractProgram the abstract program
     * @param maxDepth the max depth of courses. For example, set to 1 to compute skills over UE only, set to 2 to compute skills over UE and EC, ...
     * @return the skill values computed from the attached courses
     */
    public Set<ContentValue> getComputedSkills(AbstractProgram abstractProgram, int maxDepth)
    {
        return _computeSkills(abstractProgram, 1, maxDepth);
    }
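
    /*
     * Example (illustrative sketch, not part of the original class): collect the skills
     * acquired over the UE and EC courses of a program. The "program" variable is assumed
     * to be an AbstractProgram resolved elsewhere (e.g. through the AmetysObjectResolver).
     *
     *   Set<ContentValue> acquiredSkills = getComputedSkills(program, 2);
     *   for (ContentValue skillValue : acquiredSkills)
     *   {
     *       skillValue.getContentIfExists()
     *           .ifPresent(skill -> getLogger().info("Acquired skill: {}", skill.getTitle()));
     *   }
     */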

    private Set<ContentValue> _computeSkills(ProgramItem programItem, int depth, int maxDepth)
    {
        Set<ContentValue> skills = new HashSet<>();

        if (programItem instanceof Course)
        {
            // Get skills of current course
            ModifiableModelAwareRepeater repeaterSkills = ((Course) programItem).getRepeater(COURSES_REPEATER_ACQUIRED_SKILLS);
            if (repeaterSkills != null)
            {
                List<? extends ModifiableModelAwareRepeaterEntry> entries = repeaterSkills.getEntries();
                for (ModifiableModelAwareRepeaterEntry entry : entries)
                {
                    ModifiableModelAwareRepeater repeater = entry.getRepeater("skills");
                    if (repeater != null)
                    {
                        skills.addAll(repeater.getEntries().stream()
                            .map(e -> (ContentValue) e.getValue("skill", false, null))
                            .filter(Objects::nonNull)
                            .collect(Collectors.toSet()));
                    }
                }
            }

            if (depth < maxDepth)
            {
                ((Course) programItem).getCourseLists()
                    .stream()
                    .forEach(cl ->
                    {
                        cl.getCourses()
                            .stream().forEach(c ->
                            {
                                skills.addAll(_computeSkills(c, depth + 1, maxDepth));
                            });
                    });
            }
        }
        else
        {
            List<ProgramItem> childProgramItems = _odfHelper.getChildProgramItems(programItem);
            for (ProgramItem childProgramItem : childProgramItems)
            {
                skills.addAll(_computeSkills(childProgramItem, depth, maxDepth));
            }
        }

        return skills;
    }

    /**
     * Get the skills distribution by courses over a {@link ProgramItem}.
     * The distribution is computed over the courses of the first level only.
     * @param programItem the program item
     * @return the skills distribution
     */
    public Map<Content, Map<Content, Map<Content, Content>>> getSkillsDistribution(ProgramItem programItem)
    {
        return getSkillsDistribution(programItem, 1);
    }

    /**
     * Get the skills distribution by courses over a {@link ProgramItem}
     * @param programItem the program item
     * @param maxDepth the max depth of courses. For example, set to 1 to compute distribution over UE only, set to 2 to compute distribution over UE and EC, ...
     * @return the skills distribution as Map<SkillSet, Map<Skill, Map<Course, AcquisitionLevel>>>
     */
    public Map<Content, Map<Content, Map<Content, Content>>> getSkillsDistribution(ProgramItem programItem, int maxDepth)
    {
        // Map<SkillSet, Map<Skill, Map<Course, AcquisitionLevel>>>
        Map<Content, Map<Content, Map<Content, Content>>> skillsDistribution = new LinkedHashMap<>();

        _buildSkillsDistribution(programItem, skillsDistribution, maxDepth);

        return skillsDistribution;
    }

    private void _buildSkillsDistribution(ProgramItem programItem, Map<Content, Map<Content, Map<Content, Content>>> skillsDistribution, int maxDepth)
    {
        if (programItem instanceof Course)
        {
            _buildSkillsDistribution((Course) programItem, (Course) programItem, skillsDistribution, 1, maxDepth);
        }
        else
        {
            List<ProgramItem> childProgramItems = _odfHelper.getChildProgramItems(programItem);
            for (ProgramItem childProgramItem : childProgramItems)
            {
                _buildSkillsDistribution(childProgramItem, skillsDistribution, maxDepth);
            }
        }
    }
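
    /*
     * Example (illustrative sketch, not part of the original class): walk the nested map
     * returned by getSkillsDistribution. The "programItem" variable is assumed to be a
     * ProgramItem resolved elsewhere; keys are skill set, skill and course contents, and
     * each value is the highest acquisition level found for that course (possibly null).
     *
     *   Map<Content, Map<Content, Map<Content, Content>>> distribution = getSkillsDistribution(programItem, 2);
     *   distribution.forEach((skillSet, skills) ->
     *       skills.forEach((skill, courses) ->
     *           courses.forEach((course, acquisitionLevel) ->
     *               getLogger().info("{} / {} is acquired in {}", skillSet.getTitle(), skill.getTitle(), course.getTitle()))));
     */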

    private void _buildSkillsDistribution(Course course, Course parentCourse, Map<Content, Map<Content, Map<Content, Content>>> skillsDistribution, int depth, int maxDepth)
    {
        List<? extends ModifiableModelAwareRepeaterEntry> acquiredSkillEntries = Optional.of(course)
            .map(e -> e.getRepeater(COURSES_REPEATER_ACQUIRED_SKILLS))
            .map(ModifiableModelAwareRepeater::getEntries)
            .orElse(List.of());

        for (ModifiableModelAwareRepeaterEntry acquiredSkillEntry : acquiredSkillEntries)
        {
            Optional.of(acquiredSkillEntry)
                .map(e -> e.<ContentValue>getValue("skillSet"))
                .flatMap(ContentValue::getContentIfExists)
                .ifPresent(
                    skillSet ->
                    {
                        Map<Content, Map<Content, Content>> skills = skillsDistribution.computeIfAbsent(skillSet, __ -> new LinkedHashMap<>());

                        List<? extends ModifiableModelAwareRepeaterEntry> skillEntries = Optional.of(acquiredSkillEntry)
                            .map(e -> e.getRepeater("skills"))
                            .map(ModifiableModelAwareRepeater::getEntries)
                            .orElse(List.of());

                        for (ModifiableModelAwareRepeaterEntry skillEntry : skillEntries)
                        {
                            Content skill = Optional.of(skillEntry)
                                .map(entry -> entry.<ContentValue>getValue("skill"))
                                .flatMap(ContentValue::getContentIfExists)
                                .orElse(null);

                            if (skill != null)
                            {
                                Content acquisitionLevel = Optional.of(skillEntry)
                                    .map(entry -> entry.<ContentValue>getValue("acquisitionLevel"))
                                    .flatMap(ContentValue::getContentIfExists)
                                    .orElse(null);

                                Map<Content, Content> courses = skills.computeIfAbsent(skill, s -> new LinkedHashMap<>());
                                courses.put(parentCourse, _getMaxAcquisitionLevel(acquisitionLevel, courses.get(parentCourse)));
                            }
                        }
                    }
                );
        }

        if (depth < maxDepth)
        {
            // Get skills distribution over child courses
            course.getCourseLists()
                .stream()
                .forEach(cl ->
                {
                    cl.getCourses()
                        .stream().forEach(c ->
                        {
                            _buildSkillsDistribution(c, parentCourse, skillsDistribution, depth + 1, maxDepth);
                        });
                });
        }
    }

    private Content _getMaxAcquisitionLevel(Content level1, Content level2)
    {
        if (level1 == null)
        {
            return level2;
        }

        if (level2 == null)
        {
            return level1;
        }

        long order1 = level1.getValue("order", false, -1L);
        long order2 = level2.getValue("order", false, -1L);

        if (order1 >= order2)
        {
            return level1;
        }
        else
        {
            return level2;
        }
    }

    /**
     * Create all skills from ESCO file
     * @param skillsCSVFilePath the skills CSV file path
     */
    public void createSkillsFromESCOFileCSV(String skillsCSVFilePath)
    {
        String[] handledEvents = new String[] {
            ObservationConstants.EVENT_CONTENT_ADDED,
            ObservationConstants.EVENT_CONTENT_MODIFIED,
            ObservationConstants.EVENT_CONTENT_WORKFLOW_CHANGED,
            ObservationConstants.EVENT_CONTENT_TAGGED,
            ObservationConstants.EVENT_CONTENT_DELETED,
        };

        try
        {
            _solrIndexHelper.pauseSolrCommitForEvents(handledEvents);
            for (Skill skill : _getSkillsFromCSVFile(skillsCSVFilePath))
            {
                try
                {
                    _createSkillTableRef(skill);
                }
                catch (AmetysRepositoryException | WorkflowException e)
                {
                    getLogger().warn("An error occurred creating skill with label {}", skill.getLabel(), e);
                }
            }
        }
        finally
        {
            _solrIndexHelper.restartSolrCommitForEvents(handledEvents);
        }
    }
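
    /*
     * Expected input (illustrative, inferred from _getSkillsFromCSVFile below): a CSV export
     * of ESCO skills whose first line is a header, where column 1 (0-based) holds the concept
     * URI, column 4 the preferred label and column 5 the alternative labels separated by line
     * breaks. The path below is only a placeholder.
     *
     *   createSkillsFromESCOFileCSV("/path/to/esco-skills.csv");
     */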

    /**
     * Get the list of skills from the CSV file
     * @param skillsCSVFilePath the skills CSV file path
     * @return the list of skills
     */
    protected List<Skill> _getSkillsFromCSVFile(String skillsCSVFilePath)
    {
        List<Skill> skills = new ArrayList<>();
        try (CsvListReader listReader = new CsvListReader(new InputStreamReader(new FileInputStream(new File(skillsCSVFilePath)), "UTF-8"), CsvPreference.STANDARD_PREFERENCE))
        {
            listReader.getHeader(true); // Skip header

            List<String> read = listReader.read();
            while (read != null)
            {
                String conceptUri = read.get(1); // Uri
                String label = read.get(4); // Get label
                if (StringUtils.isNotBlank(label))
                {
                    String otherNamesAsString = read.get(5); // Get other names
                    String[] otherNames = StringUtils.isNotBlank(otherNamesAsString) ? StringUtils.split(otherNamesAsString, "\n") : ArrayUtils.EMPTY_STRING_ARRAY;
                    skills.add(new Skill(label, otherNames, conceptUri));
                }
                read = listReader.read();
            }
        }
        catch (IOException e)
        {
            getLogger().warn("An error occurred parsing file {}", skillsCSVFilePath, e);
        }

        getLogger().info("Found {} skills in file {}", skills.size(), skillsCSVFilePath);

        return skills;
    }

    /**
     * Create a skill table ref content from the skill object
     * @param skill the skill object
     * @throws AmetysRepositoryException if a repository error occurred
     * @throws WorkflowException if a workflow error occurred
     */
    protected void _createSkillTableRef(Skill skill) throws AmetysRepositoryException, WorkflowException
    {
        String uri = skill.getConceptUri();
        String titleFR = skill.getLabel();
        String[] otherNames = skill.getOtherNames();

        ContentTypeExpression cTypeExpr = new ContentTypeExpression(Operator.EQ, SKILL_CONTENT_TYPE);
        StringExpression codeExpr = new StringExpression(OdfReferenceTableEntry.CODE, Operator.EQ, uri);

        String xpathQuery = ContentQueryHelper.getContentXPathQuery(new AndExpression(cTypeExpr, codeExpr));
        AmetysObjectIterable<ModifiableContent> contents = _resolver.query(xpathQuery);
        AmetysObjectIterator<ModifiableContent> it = contents.iterator();

        if (!it.hasNext())
        {
            Map<String, String> titleVariants = new HashMap<>();
            titleVariants.put("fr", titleFR);

            Map<String, Object> result = _contentWorkflowHelper.createContent("reference-table", 1, titleFR, titleVariants, new String[] {SKILL_CONTENT_TYPE}, new String[0]);
            ModifiableContent content = (ModifiableContent) result.get(AbstractContentWorkflowComponent.CONTENT_KEY);

            content.setValue(OdfReferenceTableEntry.CODE, uri);

            if (otherNames.length > 0)
            {
                content.setValue(SKILL_OTHER_NAMES_ATTRIBUTE_NAME, otherNames);
            }

            content.saveChanges();
            _contentWorkflowHelper.doAction((WorkflowAwareContent) content, 22);

            getLogger().info("Skill's content \"{}\" ({}) was successfully created", titleFR, content.getId());
        }
    }

    private static class Skill
    {
        private String _label;
        private String[] _otherNames;
        private String _conceptUri;

        public Skill(String label, String[] otherNames, String conceptUri)
        {
            _label = label;
            _otherNames = otherNames;
            _conceptUri = conceptUri;
        }

        public String getLabel()
        {
            return _label;
        }

        public String[] getOtherNames()
        {
            return _otherNames;
        }

        public String getConceptUri()
        {
            return _conceptUri;
        }
    }
}