adt: cleanup some stream usages
mtf90 committed Sep 2, 2024
1 parent 0cc1f85 commit 9f65898
Showing 3 changed files with 45 additions and 24 deletions.
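All three files apply the same cleanup: a Stream/Collectors pipeline is replaced by an explicit loop over a pre-sized collection, so these paths skip the stream machinery and the target map or list never has to grow. Below is a minimal, self-contained sketch of the before/after pattern; the local capacity helper only mirrors the role that AutomataLib's HashUtil.capacity plays in the diff, and its exact formula here is an assumption.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Function;
    import java.util.stream.Collectors;

    public class StreamCleanupSketch {

        // Assumed stand-in for HashUtil.capacity: the smallest initial capacity
        // that holds the expected number of entries without rehashing at the
        // default load factor of 0.75.
        static int capacity(int expected) {
            return (int) Math.ceil(expected / 0.75);
        }

        // Before: concise, but allocates a stream pipeline and lets the map
        // grow (and rehash) as entries arrive.
        static <K, V> Map<K, V> remapStream(Map<K, V> source, Function<K, K> keyMapper) {
            return source.keySet()
                         .stream()
                         .collect(Collectors.toMap(keyMapper, source::get));
        }

        // After: one pre-sized map, one pass, no intermediate objects.
        static <K, V> Map<K, V> remapLoop(Map<K, V> source, Function<K, K> keyMapper) {
            Map<K, V> result = new HashMap<>(capacity(source.size()));
            for (Map.Entry<K, V> e : source.entrySet()) {
                result.put(keyMapper.apply(e.getKey()), e.getValue());
            }
            return result;
        }

        public static void main(String[] args) {
            Map<String, Integer> src = Map.of("a", 1, "b", 2);
            System.out.println(remapLoop(src, k -> k + "!")); // e.g. {a!=1, b!=2}
        }
    }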
@@ -20,10 +20,10 @@
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
-import java.util.stream.Collectors;

import de.learnlib.algorithm.adt.adt.ADTLeafNode;
import de.learnlib.algorithm.adt.adt.ADTNode;
@@ -33,6 +33,7 @@
import net.automatalib.automaton.concept.StateIDs;
import net.automatalib.automaton.transducer.MealyMachine;
import net.automatalib.common.smartcollection.ReflexiveMapView;
+import net.automatalib.common.util.HashUtil;
import net.automatalib.common.util.Pair;
import net.automatalib.util.automaton.ads.ADSUtil;
import net.automatalib.word.Word;
@@ -144,11 +145,12 @@ private Optional<ADTNode<S, I, O>> compute(Map<S, S> mapping) {

@SuppressWarnings("nullness") // false positive https://github.com/typetools/checker-framework/issues/399
final @NonNull Word<I> prefix = splittingWordCandidates.poll();
-final Map<S, S> currentToInitialMapping = mapping.keySet()
-        .stream()
-        .collect(Collectors.toMap(x -> automaton.getSuccessor(x,
-                                                              prefix),
-                                  mapping::get));
+final Map<S, S> currentToInitialMapping = new HashMap<>(HashUtil.capacity(mapping.size()));
+
+for (Entry<S, S> e : mapping.entrySet()) {
+    currentToInitialMapping.put(automaton.getSuccessor(e.getKey(), prefix), e.getValue());
+}

final BitSet currentSetAsBitSet = new BitSet();
for (S s : currentToInitialMapping.keySet()) {
currentSetAsBitSet.set(stateIds.getStateId(s));
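One behavioral nuance of the hunk above: Collectors.toMap throws an IllegalStateException when two keys collide (here, two states reaching the same successor under prefix), while the replacement loop's Map.put silently keeps the last entry. The ADS computation presumably never produces such collisions, but the difference is easy to demonstrate with hypothetical keys and values:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class ToMapDuplicates {
        public static void main(String[] args) {
            Map<String, Integer> m = Map.of("a", 1, "b", 2);

            // put semantics: a duplicate key silently overwrites (last write wins).
            Map<String, Integer> viaPut = new HashMap<>();
            for (Map.Entry<String, Integer> e : m.entrySet()) {
                viaPut.put("same-key", e.getValue());
            }
            System.out.println(viaPut); // {same-key=1} or {same-key=2}

            // Collectors.toMap semantics: a duplicate key throws.
            try {
                m.entrySet().stream().collect(Collectors.toMap(e -> "same-key", Map.Entry::getValue));
            } catch (IllegalStateException expected) {
                System.out.println("toMap rejects duplicates: " + expected.getMessage());
            }
        }
    }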
@@ -19,6 +19,8 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
@@ -60,6 +62,7 @@
import net.automatalib.alphabet.Alphabet;
import net.automatalib.alphabet.SupportsGrowingAlphabet;
import net.automatalib.automaton.transducer.MealyMachine;
+import net.automatalib.common.util.HashUtil;
import net.automatalib.common.util.Pair;
import net.automatalib.word.Word;
import org.slf4j.Logger;
@@ -258,29 +261,37 @@ public boolean refineHypothesisInternal(DefaultQuery<I, Word<O>> ceQuery) {
newNode.setHypothesisState(newState);

final ADTNode<ADTState<I, O>, I, O> temporarySplitter = ADTUtil.getStartOfADS(nodeToSplit);
-final List<ADTTransition<I, O>> newTransitions = alphabet.stream()
-        .map(i -> this.hypothesis.createOpenTransition(newState,
-                                                       i,
-                                                       this.adt.getRoot()))
-        .collect(Collectors.toList());
+final List<ADTTransition<I, O>> newTransitions = new ArrayList<>(alphabet.size());
+
+for (I i : alphabet) {
+    newTransitions.add(this.hypothesis.createOpenTransition(newState, i, this.adt.getRoot()));
+}

final List<ADTTransition<I, O>> transitionsToRefine = nodeToSplit.getHypothesisState()
.getIncomingTransitions()
.stream()
.filter(x -> !x.isSpanningTreeEdge())
.collect(Collectors.toList());

-transitionsToRefine.forEach(x -> {
+for (ADTTransition<I, O> x : transitionsToRefine) {
     x.setTarget(null);
     x.setSiftNode(temporarySplitter);
-});
+}

final ADTNode<ADTState<I, O>, I, O> finalizedSplitter = this.evaluateAdtExtension(temporarySplitter);

-transitionsToRefine.stream().filter(ADTTransition::needsSifting).forEach(x -> {
-    x.setSiftNode(finalizedSplitter);
-    this.openTransitions.add(x);
-});
-newTransitions.stream().filter(ADTTransition::needsSifting).forEach(this.openTransitions::add);
+for (ADTTransition<I, O> t : transitionsToRefine) {
+    if (t.needsSifting()) {
+        t.setSiftNode(finalizedSplitter);
+        this.openTransitions.add(t);
+    }
+}
+
+for (ADTTransition<I, O> t : newTransitions) {
+    if (t.needsSifting()) {
+        this.openTransitions.add(t);
+    }
+}

this.closeTransitions();
return true;
@@ -597,20 +608,22 @@ private boolean validateADS(ADTNode<ADTState<I, O>, I, O> oldADS,
}

final Set<ADTNode<ADTState<I, O>, I, O>> newFinalNodes = ADTUtil.collectLeaves(newADS);
+final Map<ADTState<I, O>, Pair<Word<I>, Word<O>>> traces =
+        new HashMap<>(HashUtil.capacity(newFinalNodes.size()));
+
+for (ADTNode<ADTState<I, O>, I, O> n : newFinalNodes) {
+    traces.put(n.getHypothesisState(), ADTUtil.buildTraceForNode(n));
+}

final Set<ADTState<I, O>> oldFinalStates = ADTUtil.collectHypothesisStates(oldADS);
-final Set<ADTState<I, O>> newFinalStates =
-        newFinalNodes.stream().map(ADTNode::getHypothesisState).collect(Collectors.toSet());
+final Set<ADTState<I, O>> newFinalStates = new HashSet<>(traces.keySet());
newFinalStates.addAll(cutout);

if (!oldFinalStates.equals(newFinalStates)) {
throw new IllegalArgumentException("New ADS does not cover all old nodes");
}

final Word<I> parentInputTrace = ADTUtil.buildTraceForNode(oldADS).getFirst();
-final Map<ADTState<I, O>, Pair<Word<I>, Word<O>>> traces = newFinalNodes.stream()
-        .collect(Collectors.toMap(ADTNode::getHypothesisState,
-                                  ADTUtil::buildTraceForNode));

for (Map.Entry<ADTState<I, O>, Pair<Word<I>, Word<O>>> entry : traces.entrySet()) {

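The validateADS hunk above also reorders the computation: the traces map is now built first, so newFinalStates can be copied straight from traces.keySet() instead of streaming over the leaves a second time. The copy is not just stylistic: Map.keySet() is a live view that rejects add/addAll, so a mutable HashSet is needed before merging in the cutout states. A small sketch with illustrative names (not from the library):

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    public class DeriveSetFromMapKeys {
        public static void main(String[] args) {
            List<String> leaves = List.of("q0", "q1", "q2");

            // One pass builds the map (the value stands in for buildTraceForNode).
            Map<String, Integer> traces = new HashMap<>();
            for (String leaf : leaves) {
                traces.put(leaf, leaf.length());
            }

            // keySet() is a view: traces.keySet().add("q3") would throw
            // UnsupportedOperationException, so copy into a mutable set first.
            Set<String> finalStates = new HashSet<>(traces.keySet());
            finalStates.add("q3"); // stands in for addAll(cutout)

            System.out.println(finalStates);
        }
    }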
@@ -232,7 +232,13 @@ private static <S, I, O> int computeEffectiveResetsInternal(ADTNode<S, I, O> ads,

final int nextCosts = ADTUtil.isResetNode(ads) ? accumulatedResets + 1 : accumulatedResets;

-return ads.getChildren().values().stream().mapToInt(x -> computeEffectiveResetsInternal(x, nextCosts)).sum();
+int resets = 0;
+
+for (ADTNode<S, I, O> value : ads.getChildren().values()) {
+    resets += computeEffectiveResetsInternal(value, nextCosts);
+}
+
+return resets;
}

public static <S, I, O> Pair<ADTNode<S, I, O>, ADTNode<S, I, O>> buildADSFromTrace(MealyMachine<S, I, ?, O> automaton,