import org.apache.spark.sql.Dataset; // import of the package/class this method depends on
/**
 * Returns a new ValueSets instance that includes the given value sets.
 *
 * @param valueSets the value sets to add to the returned collection.
 * @return a new ValueSets instance with the added value sets.
 * @throws IllegalArgumentException if the given value sets contain duplicate
 *     valueSetUri and valueSetVersion values.
 */
public ValueSets withValueSets(Dataset<ValueSet> valueSets) {

  Dataset newMembers = getUrlAndVersions(valueSets);

  // Ensure that there are no duplicates among the value sets. A count mismatch
  // between the input and its url/version members also indicates duplicates
  // within the input itself.
  if (hasDuplicateUrlAndVersions(newMembers) || valueSets.count() != newMembers.count()) {

    throw new IllegalArgumentException(
        "Cannot add value sets having duplicate valueSetUri and valueSetVersion");
  }

  // The value set concepts will be stored in the values table for persistence, so we remove
  // them from the individual value sets. This can be done most easily by setting concepts to an
  // empty list. Each value set is copied first so the caller's instances are not mutated.
  Dataset<ValueSet> withoutConcepts = valueSets.map(
      (MapFunction<ValueSet, ValueSet>) valueSet -> {

        ValueSet valueSetWithoutConcepts = valueSet.copy();

        List<ConceptSetComponent> updatedInclusions = new ArrayList<>();

        for (ConceptSetComponent inclusion : valueSet.getCompose().getInclude()) {

          ConceptSetComponent inclusionWithoutConcepts = inclusion.copy();

          inclusionWithoutConcepts.setConcept(new ArrayList<>());
          updatedInclusions.add(inclusionWithoutConcepts);
        }

        valueSetWithoutConcepts.getCompose().setInclude(updatedInclusions);

        return valueSetWithoutConcepts;
      }, VALUE_SET_ENCODER);

  // Expand the concepts of each input value set into rows for the values table.
  Dataset newValues = valueSets.flatMap(ValueSets::expandValuesIterator, VALUE_ENCODER);

  return withValueSets(withoutConcepts, newValues);
}