/*
 * Copyright 2019 Anyware Services
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ametys.odf.skill;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.avalon.framework.component.Component;
import org.apache.avalon.framework.service.ServiceException;
import org.apache.avalon.framework.service.ServiceManager;
import org.apache.avalon.framework.service.Serviceable;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.supercsv.io.CsvListReader;
import org.supercsv.prefs.CsvPreference;

import org.ametys.cms.ObservationConstants;
import org.ametys.cms.data.ContentValue;
import org.ametys.cms.repository.Content;
import org.ametys.cms.repository.ContentQueryHelper;
import org.ametys.cms.repository.ContentTypeExpression;
import org.ametys.cms.repository.ModifiableContent;
import org.ametys.cms.repository.WorkflowAwareContent;
import org.ametys.cms.workflow.AbstractContentWorkflowComponent;
import org.ametys.cms.workflow.ContentWorkflowHelper;
import org.ametys.core.observation.ObservationManager;
import org.ametys.odf.ODFHelper;
import org.ametys.odf.ProgramItem;
import org.ametys.odf.course.Course;
import org.ametys.odf.enumeration.OdfReferenceTableEntry;
import org.ametys.odf.program.AbstractProgram;
import org.ametys.plugins.repository.AmetysObjectIterable;
import org.ametys.plugins.repository.AmetysObjectIterator;
import org.ametys.plugins.repository.AmetysObjectResolver;
import org.ametys.plugins.repository.AmetysRepositoryException;
import org.ametys.plugins.repository.data.holder.group.impl.ModifiableModelAwareRepeater;
import org.ametys.plugins.repository.data.holder.group.impl.ModifiableModelAwareRepeaterEntry;
import org.ametys.plugins.repository.query.expression.AndExpression;
import org.ametys.plugins.repository.query.expression.Expression.Operator;
import org.ametys.plugins.repository.query.expression.StringExpression;
import org.ametys.runtime.plugin.component.AbstractLogEnabled;

import com.opensymphony.workflow.WorkflowException;

/**
 * ODF skills helper
 */
public class ODFSkillsHelper extends AbstractLogEnabled implements Serviceable, Component
{
    /** The avalon role. */
    public static final String ROLE = ODFSkillsHelper.class.getName();
    
    /** The name of the repeater for acquired skills */
    public static final String COURSES_REPEATER_ACQUIRED_SKILLS = "acquiredSkills";
    
    /** The skill content type id */
    public static final String SKILL_CONTENT_TYPE = "odf-enumeration.Skill";
    
    /** The attribute name of the skills */
    public static final String SKILLS_ATTRIBUTE_NAME = "skills";
    
    /** The skills other names attribute name */
    public static final String SKILL_OTHER_NAMES_ATTRIBUTE_NAME = "otherNames";
    
    /** The content workflow helper */
    protected ContentWorkflowHelper _contentWorkflowHelper;
    
    /** The ametys object resolver */
    protected AmetysObjectResolver _resolver;
    
    /** The ODF helper */
    protected ODFHelper _odfHelper;
    
    /** The observation manager */
    protected ObservationManager _observationManager;
    
    public void service(ServiceManager manager) throws ServiceException
    {
        _contentWorkflowHelper = (ContentWorkflowHelper) manager.lookup(ContentWorkflowHelper.ROLE);
        _resolver = (AmetysObjectResolver) manager.lookup(AmetysObjectResolver.ROLE);
        _odfHelper = (ODFHelper) manager.lookup(ODFHelper.ROLE);
        _observationManager = (ObservationManager) manager.lookup(ObservationManager.ROLE);
    }
    
    /**
     * Get the computed skill values for a given {@link AbstractProgram}
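     * <p>Illustrative usage, assuming an Avalon {@code manager} and an {@code abstractProgram} resolved elsewhere
     * (variable names are only examples):</p>
     * <pre>{@code
     * ODFSkillsHelper skillsHelper = (ODFSkillsHelper) manager.lookup(ODFSkillsHelper.ROLE);
     * // Collect the skills acquired through the first two levels of courses (UE and EC)
     * Set<ContentValue> skills = skillsHelper.getComputedSkills(abstractProgram, 2);
     * }</pre>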
     * @param abstractProgram the abstract program
     * @param maxDepth the max depth of courses. For example, set to 1 to compute skills over UE only, set to 2 to compute skills over UE and EC, ...
     * @return the skill values computed from the attached courses
     */
    public Set<ContentValue> getComputedSkills(AbstractProgram abstractProgram, int maxDepth)
    {
        return _computeSkills(abstractProgram, 1, maxDepth);
    }
    
    private Set<ContentValue> _computeSkills(ProgramItem programItem, int depth, int maxDepth)
    {
        Set<ContentValue> skills = new HashSet<>();
        
        if (programItem instanceof Course)
        {
            // Get skills of the current course
            ModifiableModelAwareRepeater repeaterSkills = ((Course) programItem).getRepeater(COURSES_REPEATER_ACQUIRED_SKILLS);
            if (repeaterSkills != null)
            {
                List< ? extends ModifiableModelAwareRepeaterEntry> entries = repeaterSkills.getEntries();
                for (ModifiableModelAwareRepeaterEntry entry : entries)
                {
                    ModifiableModelAwareRepeater repeater = entry.getRepeater("skills");
                    if (repeater != null)
                    {
                        skills.addAll(repeater.getEntries().stream()
                            .map(e -> (ContentValue) e.getValue("skill", false, null))
                            .filter(Objects::nonNull)
                            .collect(Collectors.toSet()));
                    }
                }
            }
            
            if (depth < maxDepth)
            {
                ((Course) programItem).getCourseLists()
                    .stream()
                    .forEach(cl ->
                    {
                        cl.getCourses()
                            .stream().forEach(c ->
                            {
                                skills.addAll(_computeSkills(c, depth + 1, maxDepth));
                            });
                    });
            }
        }
        else
        {
            List<ProgramItem> childProgramItems = _odfHelper.getChildProgramItems(programItem);
            for (ProgramItem childProgramItem : childProgramItems)
            {
                skills.addAll(_computeSkills(childProgramItem, depth, maxDepth));
            }
        }
        
        return skills;
    }
    
    /**
     * Get the skills distribution by courses over a {@link ProgramItem}.
     * The distribution is computed over the courses of the first level only.
     * @param programItem the program item
     * @return the skills distribution
     */
    public Map<Content, Map<Content, Map<Content, Content>>> getSkillsDistribution(ProgramItem programItem)
    {
        return getSkillsDistribution(programItem, 1);
    }
    
    /**
     * Get the skills distribution by courses over a {@link ProgramItem}
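     * <p>Illustrative traversal of the returned structure ({@code skillsHelper} and {@code programItem}
     * are assumed to be available; variable names are only examples):</p>
     * <pre>{@code
     * skillsHelper.getSkillsDistribution(programItem, 2).forEach((skillSet, skills) ->
     *     skills.forEach((skill, courses) ->
     *         courses.forEach((course, acquisitionLevel) -> {
     *             // Each skill of each skill set is mapped to the first-level courses acquiring it,
     *             // with the highest acquisition level reached for that course
     *         })));
     * }</pre>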
     * @param programItem the program item
     * @param maxDepth the max depth of courses. For example, set to 1 to compute distribution over UE only, set to 2 to compute distribution over UE and EC, ...
     * @return the skills distribution as {@code Map<SkillSet, Map<Skill, Map<Course, AcquisitionLevel>>>}
     */
    public Map<Content, Map<Content, Map<Content, Content>>> getSkillsDistribution(ProgramItem programItem, int maxDepth)
    {
        // Map<SkillSet, Map<Skill, Map<Course, AcquisitionLevel>>>
        Map<Content, Map<Content, Map<Content, Content>>> skillsDistribution = new LinkedHashMap<>();
        
        _buildSkillsDistribution(programItem, skillsDistribution, maxDepth);
        
        return skillsDistribution;
    }
    
    private void _buildSkillsDistribution(ProgramItem programItem, Map<Content, Map<Content, Map<Content, Content>>> skillsDistribution, int maxDepth)
    {
        if (programItem instanceof Course)
        {
            _buildSkillsDistribution((Course) programItem, (Course) programItem, skillsDistribution, 1, maxDepth);
        }
        else
        {
            List<ProgramItem> childProgramItems = _odfHelper.getChildProgramItems(programItem);
            for (ProgramItem childProgramItem : childProgramItems)
            {
                _buildSkillsDistribution(childProgramItem, skillsDistribution, maxDepth);
            }
        }
    }
    
    private void _buildSkillsDistribution(Course course, Course parentCourse, Map<Content, Map<Content, Map<Content, Content>>> skillsDistribution, int depth, int maxDepth)
    {
        ModifiableModelAwareRepeater repeaterSkillSets = course.getRepeater(COURSES_REPEATER_ACQUIRED_SKILLS);
        if (repeaterSkillSets != null)
        {
            List< ? extends ModifiableModelAwareRepeaterEntry> entries = repeaterSkillSets.getEntries();
            
            entries.stream()
                .forEach(e ->
                {
                    ContentValue skillSetValue = e.getValue("skillSet");
                    Content skillSet = skillSetValue != null ? skillSetValue.getContentIfExists().orElse(null) : null;
                    if (skillSet != null)
                    {
                        Map<Content, Map<Content, Content>> skills = skillsDistribution.computeIfAbsent(skillSet, s -> new LinkedHashMap<>());
                        
                        ModifiableModelAwareRepeater repeaterSkills = e.getRepeater("skills");
                        if (repeaterSkills != null)
                        {
                            repeaterSkills.getEntries().stream()
                                .forEach(e2 ->
                                {
                                    ContentValue skillValue = (ContentValue) e2.getValue("skill", false, null);
                                    Content skill = skillValue != null ? skillValue.getContentIfExists().orElse(null) : null;
                                    
                                    if (skill != null)
                                    {
                                        ContentValue acquisitionLevelValue = (ContentValue) e2.getValue("acquisitionLevel", false, null);
                                        Content acquisitionLevel = acquisitionLevelValue != null ? acquisitionLevelValue.getContentIfExists().orElse(null) : null;
                                        
                                        Map<Content, Content> courses = skills.computeIfAbsent(skill, s -> new LinkedHashMap<>());
                                        
                                        if (courses.containsKey(parentCourse))
                                        {
                                            // Keep the highest acquisition level when the skill is reached several times for this course
                                            acquisitionLevel = _getMaxAcquisitionLevel(acquisitionLevel, courses.get(parentCourse));
                                        }
                                        
                                        courses.put(parentCourse, acquisitionLevel);
                                    }
                                });
                        }
                    }
                });
        }
        
        if (depth < maxDepth)
        {
            // Get skills distribution over child courses
            course.getCourseLists()
                .stream()
                .forEach(cl ->
                {
                    cl.getCourses()
                        .stream().forEach(c ->
                        {
                            _buildSkillsDistribution(c, parentCourse, skillsDistribution, depth + 1, maxDepth);
                        });
                });
        }
    }
    
    private Content _getMaxAcquisitionLevel(Content level1, Content level2)
    {
        if (level1 == null)
        {
            return level2;
        }
        
        if (level2 == null)
        {
            return level1;
        }
        
        long order1 = level1.getValue("order", false, -1L);
        long order2 = level2.getValue("order", false, -1L);
        
        if (order1 >= order2)
        {
            return level1;
        }
        else
        {
            return level2;
        }
    }
    
    /**
     * Create all the skills from an ESCO CSV file
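     * <p>The file is expected to match the layout read by {@link #_getSkillsFromCSVFile(String)}: a header row,
     * then one row per skill with the concept URI in column 1, the label in column 4 and the other names
     * (separated by line breaks) in column 5, using 0-based column indexes.</p>
     * <p>Illustrative call (the path is only an example):</p>
     * <pre>{@code
     * odfSkillsHelper.createSkillsFromESCOFileCSV("/path/to/esco-skills.csv");
     * }</pre>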
     * @param skillsCSVFilePath the skills CSV file path
     */
    public void createSkillsFromESCOFileCSV(String skillsCSVFilePath)
    {
        String[] events = new String[] {
            ObservationConstants.EVENT_CONTENT_ADDED,
            ObservationConstants.EVENT_CONTENT_MODIFIED,
            ObservationConstants.EVENT_CONTENT_WORKFLOW_CHANGED,
            ObservationConstants.EVENT_CONTENT_TAGGED,
            ObservationConstants.EVENT_CONTENT_DELETED,
        };
        _observationManager.addArgumentForEvents(events, ObservationConstants.ARGS_CONTENT_COMMIT, false);
        
        try
        {
            for (Skill skill : _getSkillsFromCSVFile(skillsCSVFilePath))
            {
                try
                {
                    _createSkillTableRef(skill);
                }
                catch (AmetysRepositoryException | WorkflowException e)
                {
                    getLogger().warn("An error occurred while creating the skill with label {}", skill.getLabel(), e);
                }
            }
        }
        finally
        {
            _observationManager.removeArgumentForEvents(events, ObservationConstants.ARGS_CONTENT_COMMIT);
        }
    }
    
    /**
     * Get the list of skills from the CSV file
     * @param skillsCSVFilePath the skills CSV file path
     * @return the list of skills
     */
    protected List<Skill> _getSkillsFromCSVFile(String skillsCSVFilePath)
    {
        List<Skill> skills = new ArrayList<>();
        try (CsvListReader listReader = new CsvListReader(new InputStreamReader(new FileInputStream(new File(skillsCSVFilePath)), "UTF-8"), CsvPreference.STANDARD_PREFERENCE))
        {
            listReader.getHeader(true); // Skip the header row
            
            List<String> read = listReader.read();
            while (read != null)
            {
                String conceptUri = read.get(1); // URI
                String label = read.get(4); // Get label
                if (StringUtils.isNotBlank(label))
                {
                    String otherNamesAsString = read.get(5); // Get other names
                    String[] otherNames = StringUtils.isNotBlank(otherNamesAsString) ? StringUtils.split(otherNamesAsString, "\n") : ArrayUtils.EMPTY_STRING_ARRAY;
                    skills.add(new Skill(label, otherNames, conceptUri));
                }
                read = listReader.read();
            }
        }
        catch (IOException e)
        {
            getLogger().warn("An error occurred while parsing the file {}", skillsCSVFilePath, e);
        }
        
        getLogger().info("Found {} skills in file {}", skills.size(), skillsCSVFilePath);
        
        return skills;
    }
    
    /**
     * Create a skill reference table content from the skill object
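     * <p>Nothing is created if a {@link #SKILL_CONTENT_TYPE} entry already exists with the skill concept URI as its code.
     * Otherwise a new entry is created through the "reference-table" workflow, its code and other names are set,
     * and workflow action 22 is then fired on it.</p>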
     * @param skill the skill object
     * @throws AmetysRepositoryException if a repository error occurred
     * @throws WorkflowException if a workflow error occurred
     */
    protected void _createSkillTableRef(Skill skill) throws AmetysRepositoryException, WorkflowException
    {
        String uri = skill.getConceptUri();
        String titleFR = skill.getLabel();
        String[] otherNames = skill.getOtherNames();
        
        ContentTypeExpression cTypeExpr = new ContentTypeExpression(Operator.EQ, SKILL_CONTENT_TYPE);
        StringExpression codeExpr = new StringExpression(OdfReferenceTableEntry.CODE, Operator.EQ, uri);
        
        String xpathQuery = ContentQueryHelper.getContentXPathQuery(new AndExpression(cTypeExpr, codeExpr));
        AmetysObjectIterable<ModifiableContent> contents = _resolver.query(xpathQuery);
        AmetysObjectIterator<ModifiableContent> it = contents.iterator();
        
        if (!it.hasNext())
        {
            Map<String, String> titleVariants = new HashMap<>();
            titleVariants.put("fr", titleFR);
            
            Map<String, Object> result = _contentWorkflowHelper.createContent("reference-table", 1, titleFR, titleVariants, new String[] {SKILL_CONTENT_TYPE}, new String[0]);
            ModifiableContent content = (ModifiableContent) result.get(AbstractContentWorkflowComponent.CONTENT_KEY);
            
            content.setValue(OdfReferenceTableEntry.CODE, uri);
            
            if (otherNames.length > 0)
            {
                content.setValue(SKILL_OTHER_NAMES_ATTRIBUTE_NAME, otherNames);
            }
            
            content.saveChanges();
            _contentWorkflowHelper.doAction((WorkflowAwareContent) content, 22);
            
            getLogger().info("The skill content \"{}\" ({}) was successfully created", titleFR, content.getId());
        }
    }
    
    private static class Skill
    {
        private String _label;
        private String[] _otherNames;
        private String _conceptUri;
        
        public Skill(String label, String[] otherNames, String conceptUri)
        {
            _label = label;
            _otherNames = otherNames;
            _conceptUri = conceptUri;
        }
        
        public String getLabel()
        {
            return _label;
        }
        
        public String[] getOtherNames()
        {
            return _otherNames;
        }
        
        public String getConceptUri()
        {
            return _conceptUri;
        }
    }
}