Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
  • Loading branch information
thsa committed Jul 14, 2024
2 parents e34d0e6 + 67d97fd commit eafbb27
Show file tree
Hide file tree
Showing 6 changed files with 71 additions and 161 deletions.
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
Please follow the naming scheme YEAR.MONTH.RELEASE_NO_OF_MONTH
(eg. 2016.4.1 for second release in Apr 2016)
-->
<version>2024.7.1-SNAPSHOT</version>
<version>2024.7.2-SNAPSHOT</version>

<name>OpenChemLib</name>
<description>Open Source Chemistry Library</description>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,9 @@ public interface IMolDistHist extends ICompleteGraph {
byte [] getDistHist(int indexAt1, int indexAt2, byte[] arr);

boolean isInevitablePharmacophorePoint(int indexNode);


double getWeightPharmacophorePoint(int indexNode);

int getNumInevitablePharmacophorePoints();


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -459,9 +459,12 @@ public int getNumInevitablePharmacophorePoints() {

@Override
public boolean isInevitablePharmacophorePoint(int indexNode) {
// Stub: this class does not track inevitable pharmacophore points, so every
// node is reported as not inevitable. NOTE(review): presumably the weighted
// interface (getWeightPharmacophorePoint) supersedes this flag here — confirm
// that callers tolerate a constant false.
return false;
}
@Override
public double getWeightPharmacophorePoint(int indexNode) {
// Neutral default: every pharmacophore point carries weight 1.0 in this
// class (no per-node weighting is stored here).
return 1.0;
}

public static int getNumBytesEntry(){
return PPNode.getNumBytesEntry()+1;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,8 @@ public class MolDistHistViz extends DistHist implements Serializable, IMolDistHi
private boolean finalized;

private HashSet<Integer> hsIndexInevitablePPPoints;

private double [] arrWeight;

// List of the original distance table generated by the conformations.
private List<float[][]> liDistanceTable;

Expand Down Expand Up @@ -167,19 +168,14 @@ public MolDistHistViz(MolDistHist mdh) {
if(mdh.getNumPPNodes()==0){
throw new RuntimeException("Empty object given into constructor.");
}

mdh.copy(this);

modeFlexophore = mdh.getModeFlexophore();

liPPNodeViz=new ArrayList<PPNodeViz>(mdh.getNumPPNodes());
liPPNodeViz=new ArrayList<>(mdh.getNumPPNodes());
for (int i = 0; i < mdh.getNumPPNodes(); i++) {
PPNodeViz node = new PPNodeViz(mdh.getNode(i));
liPPNodeViz.add(node);
}

hsIndexInevitablePPPoints = new HashSet<Integer>();

hsIndexInevitablePPPoints = new HashSet<>();
realize();
}

Expand All @@ -191,6 +187,13 @@ public void addInevitablePharmacophorePoint(int indexPPNode){
hsIndexInevitablePPPoints.add(indexPPNode);
}

/**
 * Assigns an individual weight to one pharmacophore node.
 * The weight array is created in realize(), so this object must be
 * finalized before weights can be set.
 *
 * @param indexNode index of the pharmacophore node.
 * @param weight weight to store for that node.
 * @throws RuntimeException if the object was not finalized yet.
 */
public void setNodeWeight(int indexNode, double weight){
if(finalized){
arrWeight[indexNode]=weight;
} else {
throw new RuntimeException("MolDistHistViz not finalized!");
}
}

public void removeInevitablePharmacophorePoint(int indexPPNode){
// Un-marks the node as an inevitable pharmacophore point; a no-op if the
// index was never added to the set.
hsIndexInevitablePPPoints.remove(indexPPNode);
}
Expand Down Expand Up @@ -301,6 +304,10 @@ public void copy(MolDistHistViz copy){
copy.hsIndexInevitablePPPoints.clear();

copy.hsIndexInevitablePPPoints.addAll(hsIndexInevitablePPPoints);

copy.arrWeight = new double[this.arrWeight.length];

System.arraycopy(this.arrWeight, 0, copy.arrWeight, 0, this.arrWeight.length);

}

Expand Down Expand Up @@ -571,6 +578,12 @@ public void swapNodes(int n1, int n2){
setDistHist(n2,i, histTmp);
}
}

double w1 = arrWeight[n1];

arrWeight[n1]=arrWeight[n2];
arrWeight[n2]=w1;

}

private int compare(byte [] arr1, byte [] arr2) {
Expand Down Expand Up @@ -616,7 +629,10 @@ public boolean isInevitablePharmacophorePoint(int indexNode){

return hsIndexInevitablePPPoints.contains(indexNode);
}

@Override
public double getWeightPharmacophorePoint(int indexNode) {
// Returns the per-node weight set via setNodeWeight(). NOTE(review): arrWeight
// is allocated in realize() (all entries 1.0), so calling this before
// realize() would NPE — confirm realize() always precedes this call.
return arrWeight[indexNode];
}
public boolean isAliphatic(int indexNode) {

boolean aliphatic = true;
Expand Down Expand Up @@ -757,6 +773,10 @@ private int calcNumCExclusiveNodes(){
* Must be called after changes in the nodes or distance histograms.
*/
public void realize() {

arrWeight = new double[liPPNodeViz.size()];
Arrays.fill(arrWeight, 1.0);

for(PPNodeViz node : liPPNodeViz){
node.realize();
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package com.actelion.research.chem.descriptor.flexophore.completegraphmatcher;

import com.actelion.research.calc.ArrayUtilsCalc;
import com.actelion.research.calc.Matrix;
import com.actelion.research.calc.graph.MinimumSpanningTree;
import com.actelion.research.chem.descriptor.DescriptorHandlerFlexophore;
Expand Down Expand Up @@ -93,6 +94,8 @@ public class ObjectiveBlurFlexophoreHardMatchUncovered implements IObjectiveComp

private double [][] arrRelativeDistanceMatrixBase;

private double [] arrSimilarityTmp;

private Matrix maHelperAdjacencyQuery;

private Matrix maHelperAdjacencyBase;
Expand Down Expand Up @@ -192,6 +195,8 @@ private void initSimilarityMatrices(){
arrSimilarityHistograms[i] = new float [maxNumHistograms];
Arrays.fill(arrSimilarityHistograms[i], INIT_VAL);
}

arrSimilarityTmp=new double[ConstantsFlexophore.MAX_NUM_NODES_FLEXOPHORE];
}

/**
Expand Down Expand Up @@ -320,11 +325,8 @@ public boolean isValidSolution(SolutionCompleteGraph solution) {
//
if(mapping){
for (int i = 0; i < heap; i++) {

int indexNodeQuery = solution.getIndexQueryFromHeap(i);

int indexNodeBase = solution.getIndexCorrespondingBaseNode(indexNodeQuery);

if(!areNodesMapping(indexNodeQuery, indexNodeBase)) {
mapping = false;
break;
Expand Down Expand Up @@ -467,10 +469,17 @@ public float getSimilarity(SolutionCompleteGraph solution) {
}


double sumPairwiseMapping = 0;
// double sumPairwiseMapping = 0;

// double productPairwiseMapping = 0;

int cc=0;

int nMappings = ((heap * heap)-heap) / 2;

double [] arrMappingWeights = new double[nMappings];
double [] arrSimilarityWeighted = new double[nMappings];

for (int i = 0; i < heap; i++) {

int indexNode1Query = solution.getIndexQueryFromHeap(i);
Expand All @@ -484,17 +493,25 @@ public float getSimilarity(SolutionCompleteGraph solution) {

double scorePairwiseMapping = getScorePairwiseMapping(indexNode1Query, indexNode2Query, indexNode1Base, indexNode2Base);

sumPairwiseMapping += scorePairwiseMapping;
double w =
mdhvQuery.getWeightPharmacophorePoint(indexNode1Query)
* mdhvQuery.getWeightPharmacophorePoint(indexNode2Query);

arrMappingWeights[cc]=w;

arrSimilarityWeighted[cc++]=scorePairwiseMapping * w;

if(verbose) {
System.out.println("scorePairwiseMapping " + Formatter.format2(scorePairwiseMapping));
}
}
}

double mappings = ((heap * heap)-heap) / 2.0;

avrPairwiseMappingScaled = sumPairwiseMapping/mappings;
// double mappings = ((heap * heap)-heap) / 2.0;

double sumMappingWeights = ArrayUtilsCalc.sum(arrMappingWeights);
double sumSimilarityWeighted = ArrayUtilsCalc.sum(arrSimilarityWeighted);

avrPairwiseMappingScaled = sumSimilarityWeighted/sumMappingWeights;

coverageQuery = getRatioMinimumSpanningTreeQuery(solution);

Expand Down Expand Up @@ -633,16 +650,19 @@ public float getSimilarityNodes(SolutionCompleteGraph solution) {
return (float)similarity;
}

double sumSimilarityNodes = 0;
double sumSimilarityNodesWeighted = 0;

double sumWeights = 0;
for (int i = 0; i < heap; i++) {
int indexNodeQuery = solution.getIndexQueryFromHeap(i);
double w = mdhvQuery.getWeightPharmacophorePoint(indexNodeQuery);
int indexNodeBase = solution.getIndexCorrespondingBaseNode(indexNodeQuery);
double similarityNodePair = getSimilarityNodes(indexNodeQuery, indexNodeBase);
sumSimilarityNodes += similarityNodePair;
double similarityNodePairWeighted = getSimilarityNodes(indexNodeQuery, indexNodeBase)*w;
sumSimilarityNodesWeighted += similarityNodePairWeighted;
sumWeights+=w;
}

double mappings = heap;
avrPairwiseMappingScaled = sumSimilarityNodes/mappings;
avrPairwiseMappingScaled = sumSimilarityNodesWeighted / sumWeights;
coverageQuery = 0;
coverageBase = 0;
double ratioNodesMatchQuery = Math.min(nodesQuery, heap) / (double)Math.max(nodesQuery, heap);
Expand Down

This file was deleted.

0 comments on commit eafbb27

Please sign in to comment.