Compare commits
29 Commits
d1a56c3578
...
5bd1e568a6
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5bd1e568a6 | ||
|
|
4ad1979c18 | ||
|
|
423c9d5c93 | ||
|
|
7c3c95ab4b | ||
|
|
d71a99555c | ||
|
|
2bf2a9f5f7 | ||
|
|
810abdb705 | ||
|
|
f7b3c133bf | ||
|
|
14fcfe1ff3 | ||
|
|
70fec95a00 | ||
|
|
077af3b46e | ||
|
|
db99c74810 | ||
|
|
13a1af1f71 | ||
|
|
199c81f983 | ||
|
|
19a2a35f07 | ||
|
|
36c628cde5 | ||
|
|
1ddac63b0a | ||
|
|
e795b4cdd0 | ||
|
|
60cf6775c2 | ||
|
|
8a8c89c9ba | ||
|
|
86371668d5 | ||
|
|
d81ab25a68 | ||
|
|
02c8e6aacb | ||
|
|
f84dfb2b4b | ||
|
|
184278b72e | ||
|
|
489369f533 | ||
|
|
fbee591273 | ||
|
|
603a999b59 | ||
|
|
c3df4b12ab |
16
readme.md
16
readme.md
@@ -267,6 +267,8 @@ using the (2021 corrected) formula from the original pairSEQ paper. (Howie, et a
|
||||
|
||||
## PERFORMANCE
|
||||
|
||||
(NOTE: These results are from an older, less efficient version of the simulator, and need to be updated.)
|
||||
|
||||
On a home computer with a Ryzen 5600X CPU, 64GB of 3200MHz DDR4 RAM (half of which was allocated to the Java Virtual Machine), and a PCIe 3.0 SSD, running Linux Mint 20.3 Edge (5.13 kernel),
|
||||
the author ran a BiGpairSEQ simulation of a 96-well sample plate with 30,000 T cells/well comprising ~11,800 alphas and betas,
|
||||
taken from a sample of 4,000,000 distinct cells with an exponential frequency distribution (lambda 0.6).
|
||||
@@ -347,16 +349,24 @@ roughly as though it had a constant well population equal to the plate's average
|
||||
* ~~Apache Commons CSV library writes entries a row at a time~~
|
||||
* _Got this working, but at the cost of a profoundly strange bug in graph occupancy filtering. Have reverted the repo until I can figure out what caused that. Given how easily Thingiverse transposes CSV matrices in R, might not even be worth fixing._
|
||||
* ~~Enable GraphML output in addition to serialized object binaries, for data portability~~ DONE
|
||||
* ~~Custom vertex type with attribute for sequence occupancy?~~ DONE
|
||||
* Advantage: would eliminate the need to use maps to associate vertices with sequences, which would make the code easier to understand.
|
||||
* ~~Have a branch where this is implemented, but there's a bug that broke matching. Don't currently have time to fix.~~
|
||||
* ~~Have a branch where this is implemented, but there's a bug that broke matching. Don't currently have time to fix.~~
|
||||
* ~~Re-implement command line arguments, to enable scripting and statistical simulation studies~~ DONE
|
||||
* ~~Implement custom Vertex class to simplify code and make it easier to implement different MWM algorithms~~ DONE
|
||||
* Advantage: would eliminate the need to use maps to associate vertices with sequences, which would make the code easier to understand.
|
||||
* This also seems to be faster when using the same algorithm than the version with lots of maps, which is a nice bonus!
|
||||
* Re-implement CDR1 matching method
|
||||
* ~~Implement simulation of read depth, and of read errors. Pre-filter graph for difference in read count to eliminate spurious sequences.~~ DONE
|
||||
* Pre-filtering based on comparing (read depth) * (occupancy) to (read count) for each sequence works extremely well
|
||||
* ~~Add read depth simulation options to CLI~~ DONE
|
||||
* Update matching metadata output options in CLI
|
||||
* Update performance data in this readme
|
||||
* Refactor simulator code to collect all needed data in a single scan of the plate
|
||||
* Currently it scans once for the vertices and then again for the edge weights. This made simulating read depth awkward, and incompatible with caching of plate files.
|
||||
* This would be a fairly major rewrite of the simulator code, but could make things faster, and would definitely make them cleaner.
|
||||
* Implement Duan and Su's maximum weight matching algorithm
|
||||
* Add controllable algorithm-type parameter?
|
||||
* This would be fun and valuable, but probably take more time than I have for a hobby project.
|
||||
* Implement an auction algorithm for maximum weight matching
|
||||
* Implement an algorithm for approximating a maximum weight matching
|
||||
* Some of these run in linear or near-linear time
|
||||
* Given that the underlying biological samples have many, many sources of error, this would probably be the most useful option in practice. It seems less mathematically elegant, though, and so less fun for me.
|
||||
|
||||
@@ -12,7 +12,7 @@ import java.util.List;
|
||||
public class CellFileReader {
|
||||
|
||||
private String filename;
|
||||
private List<Integer[]> distinctCells = new ArrayList<>();
|
||||
private List<String[]> distinctCells = new ArrayList<>();
|
||||
private Integer cdr1Freq;
|
||||
|
||||
public CellFileReader(String filename) {
|
||||
@@ -32,11 +32,11 @@ public class CellFileReader {
|
||||
CSVParser parser = new CSVParser(reader, cellFileFormat);
|
||||
){
|
||||
for(CSVRecord record: parser.getRecords()) {
|
||||
Integer[] cell = new Integer[4];
|
||||
cell[0] = Integer.valueOf(record.get("Alpha CDR3"));
|
||||
cell[1] = Integer.valueOf(record.get("Beta CDR3"));
|
||||
cell[2] = Integer.valueOf(record.get("Alpha CDR1"));
|
||||
cell[3] = Integer.valueOf(record.get("Beta CDR1"));
|
||||
String[] cell = new String[4];
|
||||
cell[0] = record.get("Alpha CDR3");
|
||||
cell[1] = record.get("Beta CDR3");
|
||||
cell[2] = record.get("Alpha CDR1");
|
||||
cell[3] = record.get("Beta CDR1");
|
||||
distinctCells.add(cell);
|
||||
}
|
||||
|
||||
@@ -47,8 +47,8 @@ public class CellFileReader {
|
||||
}
|
||||
|
||||
//get CDR1 frequency
|
||||
ArrayList<Integer> cdr1Alphas = new ArrayList<>();
|
||||
for (Integer[] cell : distinctCells) {
|
||||
ArrayList<String> cdr1Alphas = new ArrayList<>();
|
||||
for (String[] cell : distinctCells) {
|
||||
cdr1Alphas.add(cell[3]);
|
||||
}
|
||||
double count = cdr1Alphas.stream().distinct().count();
|
||||
@@ -62,14 +62,4 @@ public class CellFileReader {
|
||||
}
|
||||
|
||||
public String getFilename() { return filename;}
|
||||
|
||||
//Refactor everything that uses this to have access to a Cell Sample and get the cells there instead.
|
||||
public List<Integer[]> getListOfDistinctCellsDEPRECATED(){
|
||||
return distinctCells;
|
||||
}
|
||||
|
||||
public Integer getCellCountDEPRECATED() {
|
||||
//Refactor everything that uses this to have access to a Cell Sample and get the count there instead.
|
||||
return distinctCells.size();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ import java.util.List;
|
||||
public class CellFileWriter {
|
||||
|
||||
private String[] headers = {"Alpha CDR3", "Beta CDR3", "Alpha CDR1", "Beta CDR1"};
|
||||
List<Integer[]> cells;
|
||||
List<String[]> cells;
|
||||
String filename;
|
||||
Integer cdr1Freq;
|
||||
|
||||
@@ -35,7 +35,7 @@ public class CellFileWriter {
|
||||
printer.printComment("Sample contains 1 unique CDR1 for every " + cdr1Freq + "unique CDR3s.");
|
||||
printer.printRecords(cells);
|
||||
} catch(IOException ex){
|
||||
System.out.println("Could not make new file named "+filename);
|
||||
System.out.println("Could not make new file named " + filename);
|
||||
System.err.println(ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ import java.util.stream.IntStream;
|
||||
|
||||
public class CellSample {
|
||||
|
||||
private List<Integer[]> cells;
|
||||
private List<String[]> cells;
|
||||
private Integer cdr1Freq;
|
||||
|
||||
public CellSample(Integer numDistinctCells, Integer cdr1Freq){
|
||||
@@ -24,28 +24,28 @@ public class CellSample {
|
||||
|
||||
//Each cell represented by 4 values
|
||||
//two CDR3s, and two CDR1s. First two values are CDR3s (alpha, beta), second two are CDR1s (alpha, beta)
|
||||
List<Integer[]> distinctCells = new ArrayList<>();
|
||||
List<String[]> distinctCells = new ArrayList<>();
|
||||
for(int i = 0; i < numbersCDR3.size() - 1; i = i + 2){
|
||||
//Go through entire CDR3 list once, make pairs of alphas and betas
|
||||
Integer tmpCDR3a = numbersCDR3.get(i);
|
||||
Integer tmpCDR3b = numbersCDR3.get(i+1);
|
||||
//Go through (likely shorter) CDR1 list as many times as necessary, make pairs of alphas and betas
|
||||
Integer tmpCDR1a = numbersCDR1.get(i % numbersCDR1.size());
|
||||
Integer tmpCDR1b = numbersCDR1.get((i+1) % numbersCDR1.size());
|
||||
String tmpCDR3a = numbersCDR3.get(i).toString();
|
||||
String tmpCDR3b = numbersCDR3.get(i+1).toString();
|
||||
//Go through the (likely shorter) CDR1 list as many times as necessary, make pairs of alphas and betas
|
||||
String tmpCDR1a = numbersCDR1.get(i % numbersCDR1.size()).toString();
|
||||
String tmpCDR1b = numbersCDR1.get((i+1) % numbersCDR1.size()).toString();
|
||||
//Make the array representing the cell
|
||||
Integer[] tmp = {tmpCDR3a, tmpCDR3b, tmpCDR1a, tmpCDR1b};
|
||||
String[] tmp = {tmpCDR3a, tmpCDR3b, tmpCDR1a, tmpCDR1b};
|
||||
//Add the cell to the list of distinct cells
|
||||
distinctCells.add(tmp);
|
||||
}
|
||||
this.cells = distinctCells;
|
||||
}
|
||||
|
||||
public CellSample(List<Integer[]> cells, Integer cdr1Freq){
|
||||
public CellSample(List<String[]> cells, Integer cdr1Freq){
|
||||
this.cells = cells;
|
||||
this.cdr1Freq = cdr1Freq;
|
||||
}
|
||||
|
||||
public List<Integer[]> getCells(){
|
||||
public List<String[]> getCells(){
|
||||
return cells;
|
||||
}
|
||||
|
||||
|
||||
@@ -35,6 +35,10 @@ import java.util.stream.Stream;
|
||||
* output : name of the output file
|
||||
* graphml : output a graphml file
|
||||
* binary : output a serialized binary object file
|
||||
* IF SIMULATING READ DEPTH, ALL THESE ARE REQUIRED. Absence indicates not simulating read depth
|
||||
* readdepth: number of reads per sequence
|
||||
* readerrorprob: probability of reading a sequence incorrectly
|
||||
* errcollisionprob: probability of two read errors being identical
|
||||
*
|
||||
* Match flags:
|
||||
* graphFile : name of graph and data file to use as input
|
||||
@@ -142,7 +146,20 @@ public class CommandLineInterface {
|
||||
CellSample cells = getCells(cellFilename);
|
||||
//get plate
|
||||
Plate plate = getPlate(plateFilename);
|
||||
GraphWithMapData graph = Simulator.makeGraph(cells, plate, false);
|
||||
GraphWithMapData graph;
|
||||
Integer readDepth = 1;
|
||||
Double readErrorRate = 0.0;
|
||||
Double errorCollisionRate = 0.0;
|
||||
if (line.hasOption("rd")) {
|
||||
readDepth = Integer.parseInt(line.getOptionValue("rd"));
|
||||
}
|
||||
if (line.hasOption("err")) {
|
||||
readErrorRate = Double.parseDouble(line.getOptionValue("err"));
|
||||
}
|
||||
if (line.hasOption("coll")) {
|
||||
errorCollisionRate = Double.parseDouble(line.getOptionValue("coll"));
|
||||
}
|
||||
graph = Simulator.makeGraph(cells, plate, readDepth, readErrorRate, errorCollisionRate, false);
|
||||
if (!line.hasOption("no-binary")) { //output binary file unless told not to
|
||||
GraphDataObjectWriter writer = new GraphDataObjectWriter(outputFilename, graph, false);
|
||||
writer.writeDataToFile();
|
||||
@@ -384,11 +401,32 @@ public class CommandLineInterface {
|
||||
.longOpt("no-binary")
|
||||
.desc("(Optional) Don't output serialized binary file")
|
||||
.build();
|
||||
Option readDepth = Option.builder("rd")
|
||||
.longOpt("read-depth")
|
||||
.desc("(Optional) The number of times to read each sequence.")
|
||||
.hasArg()
|
||||
.argName("depth")
|
||||
.build();
|
||||
Option readErrorProb = Option.builder("err")
|
||||
.longOpt("read-error-prob")
|
||||
.desc("(Optional) The probability that a sequence will be misread. (0.0 - 1.0)")
|
||||
.hasArg()
|
||||
.argName("prob")
|
||||
.build();
|
||||
Option errorCollisionProb = Option.builder("coll")
|
||||
.longOpt("error-collision-prob")
|
||||
.desc("(Optional) The probability that two misreads will produce the same spurious sequence. (0.0 - 1.0)")
|
||||
.hasArg()
|
||||
.argName("prob")
|
||||
.build();
|
||||
graphOptions.addOption(cellFilename);
|
||||
graphOptions.addOption(plateFilename);
|
||||
graphOptions.addOption(outputFileOption());
|
||||
graphOptions.addOption(outputGraphML);
|
||||
graphOptions.addOption(outputSerializedBinary);
|
||||
graphOptions.addOption(readDepth);
|
||||
graphOptions.addOption(readErrorProb);
|
||||
graphOptions.addOption(errorCollisionProb);
|
||||
return graphOptions;
|
||||
}
|
||||
|
||||
|
||||
@@ -69,6 +69,7 @@ public class GraphMLFileWriter {
|
||||
//Set graph attributes
|
||||
exporter.setGraphAttributeProvider( () -> graphAttributes);
|
||||
//set type, sequence, and occupancy attributes for each vertex
|
||||
//NEED TO ADD NEW FIELD FOR READ COUNT
|
||||
exporter.setVertexAttributeProvider( v -> {
|
||||
Map<String, Attribute> attributes = new HashMap<>();
|
||||
attributes.put("type", DefaultAttribute.createAttribute(v.getType().name()));
|
||||
|
||||
@@ -12,7 +12,6 @@ public interface GraphModificationFunctions {
|
||||
static Map<Vertex[], Integer> filterByOverlapThresholds(SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph,
|
||||
int low, int high, boolean saveEdges) {
|
||||
Map<Vertex[], Integer> removedEdges = new HashMap<>();
|
||||
//List<Integer[]> removedEdges = new ArrayList<>();
|
||||
for (DefaultWeightedEdge e : graph.edgeSet()) {
|
||||
if ((graph.getEdgeWeight(e) > high) || (graph.getEdgeWeight(e) < low)) {
|
||||
if(saveEdges) {
|
||||
@@ -94,6 +93,38 @@ public interface GraphModificationFunctions {
|
||||
return removedEdges;
|
||||
}
|
||||
|
||||
static Map<Vertex[], Integer> filterByRelativeReadCount (SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph, Integer threshold, boolean saveEdges) {
|
||||
Map<Vertex[], Integer> removedEdges = new HashMap<>();
|
||||
Boolean passes;
|
||||
for (DefaultWeightedEdge e : graph.edgeSet()) {
|
||||
Integer alphaReadCount = graph.getEdgeSource(e).getReadCount();
|
||||
Integer betaReadCount = graph.getEdgeTarget(e).getReadCount();
|
||||
passes = RelativeReadCountFilterFunction(threshold, alphaReadCount, betaReadCount);
|
||||
if (!passes) {
|
||||
if (saveEdges) {
|
||||
Vertex source = graph.getEdgeSource(e);
|
||||
Vertex target = graph.getEdgeTarget(e);
|
||||
Integer intWeight = (int) graph.getEdgeWeight(e);
|
||||
Vertex[] edge = {source, target};
|
||||
removedEdges.put(edge, intWeight);
|
||||
}
|
||||
else {
|
||||
graph.setEdgeWeight(e, 0.0);
|
||||
}
|
||||
}
|
||||
}
|
||||
if(saveEdges) {
|
||||
for (Vertex[] edge : removedEdges.keySet()) {
|
||||
graph.removeEdge(edge[0], edge[1]);
|
||||
}
|
||||
}
|
||||
return removedEdges;
|
||||
}
|
||||
|
||||
static Boolean RelativeReadCountFilterFunction(Integer threshold, Integer alphaReadCount, Integer betaReadCount) {
|
||||
return Math.abs(alphaReadCount - betaReadCount) < threshold;
|
||||
}
|
||||
|
||||
static void addRemovedEdges(SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph,
|
||||
Map<Vertex[], Integer> removedEdges) {
|
||||
for (Vertex[] edge : removedEdges.keySet()) {
|
||||
@@ -102,4 +133,6 @@ public interface GraphModificationFunctions {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
@@ -15,7 +15,10 @@ public class GraphWithMapData implements java.io.Serializable {
|
||||
private Integer[] wellPopulations;
|
||||
private Integer alphaCount;
|
||||
private Integer betaCount;
|
||||
private final Map<Integer, Integer> distCellsMapAlphaKey;
|
||||
private int readDepth;
|
||||
private double readErrorRate;
|
||||
private double errorCollisionRate;
|
||||
private final Map<String, String> distCellsMapAlphaKey;
|
||||
// private final Map<Integer, Integer> plateVtoAMap;
|
||||
// private final Map<Integer, Integer> plateVtoBMap;
|
||||
// private final Map<Integer, Integer> plateAtoVMap;
|
||||
@@ -25,7 +28,8 @@ public class GraphWithMapData implements java.io.Serializable {
|
||||
private final Duration time;
|
||||
|
||||
public GraphWithMapData(SimpleWeightedGraph graph, Integer numWells, Integer[] wellConcentrations,
|
||||
Map<Integer, Integer> distCellsMapAlphaKey, Integer alphaCount, Integer betaCount, Duration time){
|
||||
Map<String, String> distCellsMapAlphaKey, Integer alphaCount, Integer betaCount,
|
||||
Integer readDepth, Double readErrorRate, Double errorCollisionRate, Duration time){
|
||||
|
||||
// Map<Integer, Integer> plateVtoAMap,
|
||||
// Map<Integer,Integer> plateVtoBMap, Map<Integer, Integer> plateAtoVMap,
|
||||
@@ -43,6 +47,9 @@ public class GraphWithMapData implements java.io.Serializable {
|
||||
// this.plateBtoVMap = plateBtoVMap;
|
||||
// this.alphaWellCounts = alphaWellCounts;
|
||||
// this.betaWellCounts = betaWellCounts;
|
||||
this.readDepth = readDepth;
|
||||
this.readErrorRate = readErrorRate;
|
||||
this.errorCollisionRate = errorCollisionRate;
|
||||
this.time = time;
|
||||
}
|
||||
|
||||
@@ -66,7 +73,7 @@ public class GraphWithMapData implements java.io.Serializable {
|
||||
return betaCount;
|
||||
}
|
||||
|
||||
public Map<Integer, Integer> getDistCellsMapAlphaKey() {
|
||||
public Map<String, String> getDistCellsMapAlphaKey() {
|
||||
return distCellsMapAlphaKey;
|
||||
}
|
||||
|
||||
@@ -94,6 +101,8 @@ public class GraphWithMapData implements java.io.Serializable {
|
||||
// return betaWellCounts;
|
||||
// }
|
||||
|
||||
public Integer getReadDepth() { return readDepth; }
|
||||
|
||||
public Duration getTime() {
|
||||
return time;
|
||||
}
|
||||
@@ -105,4 +114,12 @@ public class GraphWithMapData implements java.io.Serializable {
|
||||
public String getSourceFilename() {
|
||||
return sourceFilename;
|
||||
}
|
||||
|
||||
public Double getReadErrorRate() {
|
||||
return readErrorRate;
|
||||
}
|
||||
|
||||
public Double getErrorCollisionRate() {
|
||||
return errorCollisionRate;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -250,6 +250,11 @@ public class InteractiveInterface {
|
||||
String filename = null;
|
||||
String cellFile = null;
|
||||
String plateFile = null;
|
||||
Boolean simulateReadDepth = false;
|
||||
//number of times to read each sequence in a well
|
||||
int readDepth = 1;
|
||||
double readErrorRate = 0.0;
|
||||
double errorCollisionRate = 0.0;
|
||||
try {
|
||||
String str = "\nGenerating bipartite weighted graph encoding occupancy overlap data ";
|
||||
str = str.concat("\nrequires a cell sample file and a sample plate file.");
|
||||
@@ -258,6 +263,35 @@ public class InteractiveInterface {
|
||||
cellFile = sc.next();
|
||||
System.out.print("\nPlease enter name of an existing sample plate file: ");
|
||||
plateFile = sc.next();
|
||||
System.out.println("\nEnable simulation of sequence read depth and sequence read errors? (y/n)");
|
||||
System.out.println("NOTE: sample plate data cannot be cached when simulating read errors");
|
||||
String ans = sc.next();
|
||||
Pattern pattern = Pattern.compile("(?:yes|y)", Pattern.CASE_INSENSITIVE);
|
||||
Matcher matcher = pattern.matcher(ans);
|
||||
if(matcher.matches()){
|
||||
simulateReadDepth = true;
|
||||
}
|
||||
if (simulateReadDepth) {
|
||||
BiGpairSEQ.setCachePlate(false);
|
||||
BiGpairSEQ.clearPlateInMemory();
|
||||
System.out.print("\nPlease enter read depth (the integer number of reads per sequence): ");
|
||||
readDepth = sc.nextInt();
|
||||
if(readDepth < 1) {
|
||||
throw new InputMismatchException("The read depth must be an integer >= 1");
|
||||
}
|
||||
System.out.print("\nPlease enter probability of a sequence read error (0.0 to 1.0): ");
|
||||
readErrorRate = sc.nextDouble();
|
||||
if(readErrorRate < 0.0 || readErrorRate > 1.0) {
|
||||
throw new InputMismatchException("The read error rate must be in the range [0.0, 1.0]");
|
||||
}
|
||||
System.out.println("\nPlease enter the probability of read error collision");
|
||||
System.out.println("(the likelihood that two read errors produce the same spurious sequence)");
|
||||
System.out.print("(0.0 to 1.0): ");
|
||||
errorCollisionRate = sc.nextDouble();
|
||||
if(errorCollisionRate < 0.0 || errorCollisionRate > 1.0) {
|
||||
throw new InputMismatchException("The error collision probability must be an in the range [0.0, 1.0]");
|
||||
}
|
||||
}
|
||||
System.out.println("\nThe graph and occupancy data will be written to a file.");
|
||||
System.out.print("Please enter a name for the output file: ");
|
||||
filename = sc.next();
|
||||
@@ -304,7 +338,7 @@ public class InteractiveInterface {
|
||||
System.out.println("Returning to main menu.");
|
||||
}
|
||||
else{
|
||||
GraphWithMapData data = Simulator.makeGraph(cellSample, plate, true);
|
||||
GraphWithMapData data = Simulator.makeGraph(cellSample, plate, readDepth, readErrorRate, errorCollisionRate, true);
|
||||
assert filename != null;
|
||||
if(BiGpairSEQ.outputBinary()) {
|
||||
GraphDataObjectWriter dataWriter = new GraphDataObjectWriter(filename, data);
|
||||
|
||||
@@ -9,27 +9,34 @@ public class MatchingResult {
|
||||
private final List<String> comments;
|
||||
private final List<String> headers;
|
||||
private final List<List<String>> allResults;
|
||||
private final Map<Integer, Integer> matchMap;
|
||||
private final Duration time;
|
||||
private final Map<String, String> matchMap;
|
||||
|
||||
public MatchingResult(Map<String, String> metadata, List<String> headers,
|
||||
List<List<String>> allResults, Map<Integer, Integer>matchMap, Duration time){
|
||||
List<List<String>> allResults, Map<String, String>matchMap){
|
||||
/*
|
||||
* POSSIBLE KEYS FOR METADATA MAP ARE:
|
||||
* sample plate filename *
|
||||
* graph filename *
|
||||
* matching weight *
|
||||
* well populations *
|
||||
* total alphas found *
|
||||
* total betas found *
|
||||
* high overlap threshold *
|
||||
* low overlap threshold *
|
||||
* maximum occupancy difference *
|
||||
* minimum overlap percent *
|
||||
* sequence read depth *
|
||||
* sequence read error rate *
|
||||
* read error collision rate *
|
||||
* total alphas read from plate *
|
||||
* total betas read from plate *
|
||||
* alphas in graph (after pre-filtering) *
|
||||
* betas in graph (after pre-filtering) *
|
||||
* high overlap threshold for pairing *
|
||||
* low overlap threshold for pairing *
|
||||
* maximum occupancy difference for pairing *
|
||||
* minimum overlap percent for pairing *
|
||||
* pairing attempt rate *
|
||||
* correct pairing count *
|
||||
* incorrect pairing count *
|
||||
* pairing error rate *
|
||||
* simulation time (seconds)
|
||||
* time to generate graph (seconds) *
|
||||
* time to pair sequences (seconds) *
|
||||
* total simulation time (seconds) *
|
||||
*/
|
||||
this.metadata = metadata;
|
||||
this.comments = new ArrayList<>();
|
||||
@@ -39,8 +46,6 @@ public class MatchingResult {
|
||||
this.headers = headers;
|
||||
this.allResults = allResults;
|
||||
this.matchMap = matchMap;
|
||||
this.time = time;
|
||||
|
||||
}
|
||||
|
||||
public Map<String, String> getMetadata() {return metadata;}
|
||||
@@ -57,13 +62,13 @@ public class MatchingResult {
|
||||
return headers;
|
||||
}
|
||||
|
||||
public Map<Integer, Integer> getMatchMap() {
|
||||
public Map<String, String> getMatchMap() {
|
||||
return matchMap;
|
||||
}
|
||||
|
||||
public Duration getTime() {
|
||||
return time;
|
||||
}
|
||||
// public Duration getTime() {
|
||||
// return time;
|
||||
// }
|
||||
|
||||
public String getPlateFilename() {
|
||||
return metadata.get("sample plate filename");
|
||||
@@ -84,20 +89,20 @@ public class MatchingResult {
|
||||
}
|
||||
|
||||
public Integer getAlphaCount() {
|
||||
return Integer.parseInt(metadata.get("total alpha count"));
|
||||
return Integer.parseInt(metadata.get("total alphas read from plate"));
|
||||
}
|
||||
|
||||
public Integer getBetaCount() {
|
||||
return Integer.parseInt(metadata.get("total beta count"));
|
||||
return Integer.parseInt(metadata.get("total betas read from plate"));
|
||||
}
|
||||
|
||||
public Integer getHighOverlapThreshold() { return Integer.parseInt(metadata.get("high overlap threshold"));}
|
||||
public Integer getHighOverlapThreshold() { return Integer.parseInt(metadata.get("high overlap threshold for pairing"));}
|
||||
|
||||
public Integer getLowOverlapThreshold() { return Integer.parseInt(metadata.get("low overlap threshold"));}
|
||||
public Integer getLowOverlapThreshold() { return Integer.parseInt(metadata.get("low overlap threshold for pairing"));}
|
||||
|
||||
public Integer getMaxOccupancyDifference() { return Integer.parseInt(metadata.get("maximum occupancy difference"));}
|
||||
public Integer getMaxOccupancyDifference() { return Integer.parseInt(metadata.get("maximum occupancy difference for pairing"));}
|
||||
|
||||
public Integer getMinOverlapPercent() { return Integer.parseInt(metadata.get("minimum overlap percent"));}
|
||||
public Integer getMinOverlapPercent() { return Integer.parseInt(metadata.get("minimum overlap percent for pairing"));}
|
||||
|
||||
public Double getPairingAttemptRate() { return Double.parseDouble(metadata.get("pairing attempt rate"));}
|
||||
|
||||
@@ -107,6 +112,6 @@ public class MatchingResult {
|
||||
|
||||
public Double getPairingErrorRate() { return Double.parseDouble(metadata.get("pairing error rate"));}
|
||||
|
||||
public String getSimulationTime() { return metadata.get("simulation time (seconds)"); }
|
||||
public String getSimulationTime() { return metadata.get("total simulation time (seconds)"); }
|
||||
|
||||
}
|
||||
|
||||
@@ -5,13 +5,14 @@ TODO: Implement exponential distribution using inversion method - DONE
|
||||
TODO: Implement discrete frequency distributions using Vose's Alias Method
|
||||
*/
|
||||
|
||||
|
||||
import java.util.*;
|
||||
|
||||
public class Plate {
|
||||
private CellSample cells;
|
||||
private String sourceFile;
|
||||
private String filename;
|
||||
private List<List<Integer[]>> wells;
|
||||
private List<List<String[]>> wells;
|
||||
private final Random rand = BiGpairSEQ.getRand();
|
||||
private int size;
|
||||
private double error;
|
||||
@@ -48,13 +49,13 @@ public class Plate {
|
||||
}
|
||||
|
||||
//constructor for returning a Plate from a PlateFileReader
|
||||
public Plate(String filename, List<List<Integer[]>> wells) {
|
||||
public Plate(String filename, List<List<String[]>> wells) {
|
||||
this.filename = filename;
|
||||
this.wells = wells;
|
||||
this.size = wells.size();
|
||||
|
||||
List<Integer> concentrations = new ArrayList<>();
|
||||
for (List<Integer[]> w: wells) {
|
||||
for (List<String[]> w: wells) {
|
||||
if(!concentrations.contains(w.size())){
|
||||
concentrations.add(w.size());
|
||||
}
|
||||
@@ -65,7 +66,7 @@ public class Plate {
|
||||
}
|
||||
}
|
||||
|
||||
private void fillWellsExponential(List<Integer[]> cells, double lambda){
|
||||
private void fillWellsExponential(List<String[]> cells, double lambda){
|
||||
this.lambda = lambda;
|
||||
exponential = true;
|
||||
int numSections = populations.length;
|
||||
@@ -74,17 +75,17 @@ public class Plate {
|
||||
int n;
|
||||
while (section < numSections){
|
||||
for (int i = 0; i < (size / numSections); i++) {
|
||||
List<Integer[]> well = new ArrayList<>();
|
||||
List<String[]> well = new ArrayList<>();
|
||||
for (int j = 0; j < populations[section]; j++) {
|
||||
do {
|
||||
//inverse transform sampling: for random number u in [0,1), x = log(1-u) / (-lambda)
|
||||
m = (Math.log10((1 - rand.nextDouble()))/(-lambda)) * Math.sqrt(cells.size());
|
||||
} while (m >= cells.size() || m < 0);
|
||||
n = (int) Math.floor(m);
|
||||
Integer[] cellToAdd = cells.get(n).clone();
|
||||
String[] cellToAdd = cells.get(n).clone();
|
||||
for(int k = 0; k < cellToAdd.length; k++){
|
||||
if(Math.abs(rand.nextDouble()) < error){//error applied to each seqeunce
|
||||
cellToAdd[k] = -1;
|
||||
if(Math.abs(rand.nextDouble()) <= error){//error applied to each sequence
|
||||
cellToAdd[k] = "-1";
|
||||
}
|
||||
}
|
||||
well.add(cellToAdd);
|
||||
@@ -95,7 +96,7 @@ public class Plate {
|
||||
}
|
||||
}
|
||||
|
||||
private void fillWells( List<Integer[]> cells, double stdDev) {
|
||||
private void fillWells( List<String[]> cells, double stdDev) {
|
||||
this.stdDev = stdDev;
|
||||
int numSections = populations.length;
|
||||
int section = 0;
|
||||
@@ -103,16 +104,16 @@ public class Plate {
|
||||
int n;
|
||||
while (section < numSections){
|
||||
for (int i = 0; i < (size / numSections); i++) {
|
||||
List<Integer[]> well = new ArrayList<>();
|
||||
List<String[]> well = new ArrayList<>();
|
||||
for (int j = 0; j < populations[section]; j++) {
|
||||
do {
|
||||
m = (rand.nextGaussian() * stdDev) + (cells.size() / 2);
|
||||
} while (m >= cells.size() || m < 0);
|
||||
n = (int) Math.floor(m);
|
||||
Integer[] cellToAdd = cells.get(n).clone();
|
||||
String[] cellToAdd = cells.get(n).clone();
|
||||
for(int k = 0; k < cellToAdd.length; k++){
|
||||
if(Math.abs(rand.nextDouble()) < error){//error applied to each sequence
|
||||
cellToAdd[k] = -1;
|
||||
cellToAdd[k] = "-1";
|
||||
}
|
||||
}
|
||||
well.add(cellToAdd);
|
||||
@@ -143,40 +144,131 @@ public class Plate {
|
||||
return error;
|
||||
}
|
||||
|
||||
public List<List<Integer[]>> getWells() {
|
||||
public List<List<String[]>> getWells() {
|
||||
return wells;
|
||||
}
|
||||
|
||||
//returns a map of the counts of the sequence at cell index sIndex, in all wells
|
||||
public Map<Integer, Integer> assayWellsSequenceS(int... sIndices){
|
||||
return this.assayWellsSequenceS(0, size, sIndices);
|
||||
public void assayWellsSequenceS(Map<String, Integer> sequences, int... sIndices){
|
||||
this.assayWellsSequenceS(sequences, 0, size, sIndices);
|
||||
}
|
||||
|
||||
//returns a map of the counts of the sequence at cell index sIndex, in a specific well
|
||||
public Map<Integer, Integer> assayWellsSequenceS(int n, int... sIndices) { return this.assayWellsSequenceS(n, n+1, sIndices);}
|
||||
public void assayWellsSequenceS(Map<String, Integer> sequences, int n, int... sIndices) {
|
||||
this.assayWellsSequenceS(sequences, n, n+1, sIndices);
|
||||
}
|
||||
|
||||
//returns a map of the counts of the sequence at cell index sIndex, in a range of wells
|
||||
public Map<Integer, Integer> assayWellsSequenceS(int start, int end, int... sIndices) {
|
||||
Map<Integer,Integer> assay = new HashMap<>();
|
||||
public void assayWellsSequenceS(Map<String, Integer> sequences, int start, int end, int... sIndices) {
|
||||
for(int sIndex: sIndices){
|
||||
for(int i = start; i < end; i++){
|
||||
countSequences(assay, wells.get(i), sIndex);
|
||||
countSequences(sequences, wells.get(i), sIndex);
|
||||
}
|
||||
}
|
||||
return assay;
|
||||
}
|
||||
//For the sequences at cell indices sIndices, counts number of unique sequences in the given well into the given map
|
||||
private void countSequences(Map<Integer, Integer> wellMap, List<Integer[]> well, int... sIndices) {
|
||||
for(Integer[] cell : well) {
|
||||
private void countSequences(Map<String, Integer> wellMap, List<String[]> well, int... sIndices) {
|
||||
for(String[] cell : well) {
|
||||
for(int sIndex: sIndices){
|
||||
//skip dropout sequences, which have value -1
|
||||
if(cell[sIndex] != -1){
|
||||
if(!"-1".equals(cell[sIndex])){
|
||||
wellMap.merge(cell[sIndex], 1, (oldValue, newValue) -> oldValue + newValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//returns a map of the counts of the sequence at cell index sIndex, in all wells
|
||||
//Simulates read depth and read errors, counts the number of reads of a unique sequence into the given map.
|
||||
public void assayWellsSequenceSWithReadDepth(Map<String, Integer> misreadCounts, Map<String, Integer> occupancyMap, Map<String, Integer> readCountMap,
|
||||
int readDepth, double readErrorProb, double errorCollisionProb, int... sIndices) {
|
||||
this.assayWellsSequenceSWithReadDepth(misreadCounts, occupancyMap, readCountMap, readDepth, readErrorProb, errorCollisionProb, 0, size, sIndices);
|
||||
}
|
||||
//returns a map of the counts of the sequence at cell index sIndex, in a specific of wells
|
||||
//Simulates read depth and read errors, counts the number of reads of a unique sequence into the given map.
|
||||
public void assayWellsSequenceSWithReadDepth(Map<String, Integer> misreadCounts, Map<String, Integer> occupancyMap, Map<String, Integer> readCountMap,
|
||||
int readDepth, double readErrorProb, double errorCollisionProb,
|
||||
int n, int... sIndices) {
|
||||
this.assayWellsSequenceSWithReadDepth(misreadCounts, occupancyMap, readCountMap, readDepth, readErrorProb, errorCollisionProb, n, n+1, sIndices);
|
||||
}
|
||||
|
||||
//returns a map of the counts of the sequence at cell index sIndex, in a range of wells
|
||||
//Simulates read depth and read errors, counts the number of reads of a unique sequence into the given map.
|
||||
public void assayWellsSequenceSWithReadDepth(Map<String, Integer> misreadCounts, Map<String, Integer> occupancyMap, Map<String, Integer> readCountMap,
|
||||
int readDepth, double readErrorProb, double errorCollisionProb,
|
||||
int start, int end, int... sIndices) {
|
||||
for(int sIndex: sIndices){
|
||||
for(int i = start; i < end; i++){
|
||||
countSequencesWithReadDepth(misreadCounts, occupancyMap, readCountMap, readDepth, readErrorProb, errorCollisionProb, wells.get(i), sIndex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//For the sequences at cell indices sIndices, counts number of unique sequences in the given well into the given map
|
||||
//Simulates read depth and read errors, counts the number of reads of a unique sequence into the given map.
|
||||
//NOTE: this function changes the content of the well, adding spurious cells to contain the misread sequences
|
||||
//(this is necessary because, in the simulation, the plate is read multiple times, but random misreads can only
|
||||
//be simulated once).
|
||||
//(Possibly I should refactor all of this to only require a single plate assay, to speed things up. Or at least
|
||||
//to see if it would speed things up.)
|
||||
private void countSequencesWithReadDepth(Map<String, Integer> distinctMisreadCounts, Map<String, Integer> occupancyMap, Map<String, Integer> readCountMap,
|
||||
int readDepth, double readErrorProb, double errorCollisionProb,
|
||||
List<String[]> well, int... sIndices) {
|
||||
//list of spurious cells to add to well after counting
|
||||
List<String[]> spuriousCells = new ArrayList<>();
|
||||
for(String[] cell : well) {
|
||||
//new potential spurious cell for each cell that gets read
|
||||
String[] spuriousCell = new String[SequenceType.values().length];
|
||||
//initialize spurious cell with all dropout sequences
|
||||
Arrays.fill(spuriousCell, "-1");
|
||||
//has a read error occurred?
|
||||
boolean readError = false;
|
||||
for(int sIndex: sIndices){
|
||||
//skip dropout sequences, which have value "-1"
|
||||
if(!"-1".equals(cell[sIndex])){
|
||||
Map<String, Integer> sequencesWithReadCounts = new LinkedHashMap<>();
|
||||
for(int i = 0; i < readDepth; i++) {
|
||||
if (rand.nextDouble() <= readErrorProb) {
|
||||
readError = true;
|
||||
//Read errors are represented by appending "*"s to the end of the sequence some number of times
|
||||
StringBuilder spurious = new StringBuilder(cell[sIndex]);
|
||||
//if this sequence hasn't been misread before, or the read error is unique,
|
||||
//append one more "*" than has been appended before
|
||||
if (!distinctMisreadCounts.containsKey(cell[sIndex]) || rand.nextDouble() > errorCollisionProb) {
|
||||
distinctMisreadCounts.merge(cell[sIndex], 1, (oldValue, newValue) -> oldValue + newValue);
|
||||
for (int j = 0; j < distinctMisreadCounts.get(cell[sIndex]); j++) {
|
||||
spurious.append("*");
|
||||
}
|
||||
}
|
||||
//if this is a read error collision, randomly choose a number of "*"s that has been appended before
|
||||
else {
|
||||
int starCount = rand.nextInt(distinctMisreadCounts.get(cell[sIndex]));
|
||||
for (int j = 0; j < starCount; j++) {
|
||||
spurious.append("*");
|
||||
}
|
||||
}
|
||||
sequencesWithReadCounts.merge(spurious.toString(), 1, (oldValue, newValue) -> oldValue + newValue);
|
||||
//add spurious sequence to spurious cell
|
||||
spuriousCell[sIndex] = spurious.toString();
|
||||
}
|
||||
else {
|
||||
sequencesWithReadCounts.merge(cell[sIndex], 1, (oldValue, newValue) -> oldValue + newValue);
|
||||
}
|
||||
}
|
||||
for(String seq : sequencesWithReadCounts.keySet()) {
|
||||
occupancyMap.merge(seq, 1, (oldValue, newValue) -> oldValue + newValue);
|
||||
readCountMap.merge(seq, sequencesWithReadCounts.get(seq), (oldValue, newValue) -> oldValue + newValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (readError) { //only add a new spurious cell if there was a read error
|
||||
spuriousCells.add(spuriousCell);
|
||||
}
|
||||
}
|
||||
//add all spurious cells to the well
|
||||
well.addAll(spuriousCells);
|
||||
}
|
||||
|
||||
public String getSourceFileName() {
|
||||
return sourceFile;
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ import java.util.regex.Pattern;
|
||||
|
||||
public class PlateFileReader {
|
||||
|
||||
private List<List<Integer[]>> wells = new ArrayList<>();
|
||||
private List<List<String[]>> wells = new ArrayList<>();
|
||||
private String filename;
|
||||
|
||||
public PlateFileReader(String filename){
|
||||
@@ -32,17 +32,17 @@ public class PlateFileReader {
|
||||
CSVParser parser = new CSVParser(reader, plateFileFormat);
|
||||
){
|
||||
for(CSVRecord record: parser.getRecords()) {
|
||||
List<Integer[]> well = new ArrayList<>();
|
||||
List<String[]> well = new ArrayList<>();
|
||||
for(String s: record) {
|
||||
if(!"".equals(s)) {
|
||||
String[] intString = s.replaceAll("\\[", "")
|
||||
String[] sequences = s.replaceAll("\\[", "")
|
||||
.replaceAll("]", "")
|
||||
.replaceAll(" ", "")
|
||||
.split(",");
|
||||
//System.out.println(intString);
|
||||
Integer[] arr = new Integer[intString.length];
|
||||
for (int i = 0; i < intString.length; i++) {
|
||||
arr[i] = Integer.valueOf(intString[i]);
|
||||
//System.out.println(sequences);
|
||||
String[] arr = new String[sequences.length];
|
||||
for (int i = 0; i < sequences.length; i++) {
|
||||
arr[i] = sequences[i];
|
||||
}
|
||||
well.add(arr);
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ import java.util.*;
|
||||
|
||||
public class PlateFileWriter {
|
||||
private int size;
|
||||
private List<List<Integer[]>> wells;
|
||||
private List<List<String[]>> wells;
|
||||
private double stdDev;
|
||||
private double lambda;
|
||||
private Double error;
|
||||
@@ -40,13 +40,13 @@ public class PlateFileWriter {
|
||||
}
|
||||
|
||||
public void writePlateFile(){
|
||||
Comparator<List<Integer[]>> listLengthDescending = Comparator.comparingInt(List::size);
|
||||
Comparator<List<String[]>> listLengthDescending = Comparator.comparingInt(List::size);
|
||||
wells.sort(listLengthDescending.reversed());
|
||||
int maxLength = wells.get(0).size();
|
||||
List<List<String>> wellsAsStrings = new ArrayList<>();
|
||||
for (List<Integer[]> w: wells){
|
||||
for (List<String[]> w: wells){
|
||||
List<String> tmp = new ArrayList<>();
|
||||
for(Integer[] c: w) {
|
||||
for(String[] c: w) {
|
||||
tmp.add(Arrays.toString(c));
|
||||
}
|
||||
wellsAsStrings.add(tmp);
|
||||
|
||||
@@ -12,9 +12,6 @@ import java.text.NumberFormat;
|
||||
import java.time.Instant;
|
||||
import java.time.Duration;
|
||||
import java.util.*;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
import static java.lang.Float.*;
|
||||
|
||||
//NOTE: "sequence" in method and variable names refers to a peptide sequence from a simulated T cell
|
||||
public class Simulator implements GraphModificationFunctions {
|
||||
@@ -24,9 +21,14 @@ public class Simulator implements GraphModificationFunctions {
|
||||
//sourceVertexIndices and targetVertexIndices are indices within the cell to use as for the two sets of vertices
|
||||
//in the bipartite graph. "Source" and "target" are JGraphT terms for the two vertices an edge touches,
|
||||
//even if not directed.
|
||||
public static GraphWithMapData makeGraph(CellSample cellSample, Plate samplePlate, boolean verbose) {
|
||||
public static GraphWithMapData makeGraph(CellSample cellSample, Plate samplePlate, int readDepth, double readErrorRate, double errorCollisionRate, boolean verbose) {
|
||||
Instant start = Instant.now();
|
||||
List<Integer[]> distinctCells = cellSample.getCells();
|
||||
Map<String, Integer> allAlphas;
|
||||
Map<String, Integer> allBetas;
|
||||
Map<String, Integer> alphaReadCounts = null;
|
||||
Map<String, Integer> betaReadCounts = null;
|
||||
Map<String, Integer> misreadCounts;
|
||||
List<String[]> distinctCells = cellSample.getCells();
|
||||
int[] alphaIndices = {SequenceType.CDR3_ALPHA.ordinal()};
|
||||
int[] betaIndices = {SequenceType.CDR3_BETA.ordinal()};
|
||||
|
||||
@@ -34,13 +36,26 @@ public class Simulator implements GraphModificationFunctions {
|
||||
|
||||
if(verbose){System.out.println("Making cell maps");}
|
||||
//HashMap keyed to Alphas, values Betas
|
||||
Map<Integer, Integer> distCellsMapAlphaKey = makeSequenceToSequenceMap(distinctCells, 0, 1);
|
||||
Map<String, String> distCellsMapAlphaKey = makeSequenceToSequenceMap(distinctCells, 0, 1);
|
||||
if(verbose){System.out.println("Cell maps made");}
|
||||
|
||||
if(verbose){System.out.println("Making well maps");}
|
||||
if(readDepth == 1) {
|
||||
allAlphas = new HashMap<>();
|
||||
samplePlate.assayWellsSequenceS(allAlphas, alphaIndices);
|
||||
allBetas = new HashMap<>();
|
||||
samplePlate.assayWellsSequenceS(allBetas, betaIndices);
|
||||
}
|
||||
else {
|
||||
misreadCounts = new HashMap<>();
|
||||
allAlphas = new HashMap<>();
|
||||
alphaReadCounts = new HashMap<>();
|
||||
samplePlate.assayWellsSequenceSWithReadDepth(misreadCounts, allAlphas, alphaReadCounts, readDepth, readErrorRate, errorCollisionRate, alphaIndices);
|
||||
allBetas = new HashMap<>();
|
||||
betaReadCounts = new HashMap<>();
|
||||
samplePlate.assayWellsSequenceSWithReadDepth(misreadCounts, allBetas, betaReadCounts, readDepth, readErrorRate, errorCollisionRate, betaIndices);
|
||||
}
|
||||
|
||||
Map<Integer, Integer> allAlphas = samplePlate.assayWellsSequenceS(alphaIndices);
|
||||
Map<Integer, Integer> allBetas = samplePlate.assayWellsSequenceS(betaIndices);
|
||||
int alphaCount = allAlphas.size();
|
||||
if(verbose){System.out.println("All alphas count: " + alphaCount);}
|
||||
int betaCount = allBetas.size();
|
||||
@@ -48,10 +63,22 @@ public class Simulator implements GraphModificationFunctions {
|
||||
if(verbose){System.out.println("Well maps made");}
|
||||
|
||||
//ideally we wouldn't do any graph pre-filtering. But sequences present in all wells add a huge number of edges to the graph and don't carry any signal value
|
||||
if(verbose){System.out.println("Removing sequences present in all wells.");}
|
||||
filterByOccupancyThresholds(allAlphas, 1, numWells - 1);
|
||||
filterByOccupancyThresholds(allBetas, 1, numWells - 1);
|
||||
if(verbose){System.out.println("Sequences removed");}
|
||||
if (readDepth == 1) {
|
||||
if(verbose){System.out.println("Removing sequences present in all wells.");}
|
||||
filterByOccupancyThresholds(allAlphas, 1, numWells - 1);
|
||||
filterByOccupancyThresholds(allBetas, 1, numWells - 1);
|
||||
if(verbose){System.out.println("Sequences removed");}
|
||||
}
|
||||
else {
|
||||
if(verbose){System.out.println("Removing sequences present in all wells.");}
|
||||
filterByOccupancyThresholds(allAlphas, 1, numWells - 1);
|
||||
filterByOccupancyThresholds(allBetas, 1, numWells - 1);
|
||||
if(verbose){System.out.println("Sequences removed");}
|
||||
if(verbose){System.out.println("Removing sequences with disparate occupancies and read counts");}
|
||||
filterByOccupancyAndReadCount(allAlphas, alphaReadCounts, readDepth);
|
||||
filterByOccupancyAndReadCount(allBetas, betaReadCounts, readDepth);
|
||||
if(verbose){System.out.println("Sequences removed");}
|
||||
}
|
||||
int pairableAlphaCount = allAlphas.size();
|
||||
if(verbose){System.out.println("Remaining alphas count: " + pairableAlphaCount);}
|
||||
int pairableBetaCount = allBetas.size();
|
||||
@@ -63,17 +90,17 @@ public class Simulator implements GraphModificationFunctions {
|
||||
//distinct indices between the rows and columns. vertexStartValue lets me track where I switch
|
||||
//from numbering rows to columns, so I can assign unique numbers to every vertex, and then
|
||||
//subtract the vertexStartValue from betas to use their vertex labels as array indices
|
||||
Integer vertexStartValue = 0;
|
||||
int vertexStartValue = 0;
|
||||
//keys are sequential integer vertices, values are alphas
|
||||
Map<Integer, Integer> plateVtoAMap = makeVertexToSequenceMap(allAlphas, vertexStartValue);
|
||||
Map<Integer, String> plateVtoAMap = makeVertexToSequenceMap(allAlphas, vertexStartValue);
|
||||
//new start value for vertex to beta map should be one more than final vertex value in alpha map
|
||||
vertexStartValue += plateVtoAMap.size();
|
||||
//keys are sequential integers vertices, values are betas
|
||||
Map<Integer, Integer> plateVtoBMap = makeVertexToSequenceMap(allBetas, vertexStartValue);
|
||||
Map<Integer, String> plateVtoBMap = makeVertexToSequenceMap(allBetas, vertexStartValue);
|
||||
//keys are alphas, values are sequential integer vertices from previous map
|
||||
Map<Integer, Integer> plateAtoVMap = invertVertexMap(plateVtoAMap);
|
||||
Map<String, Integer> plateAtoVMap = invertVertexMap(plateVtoAMap);
|
||||
//keys are betas, values are sequential integer vertices from previous map
|
||||
Map<Integer, Integer> plateBtoVMap = invertVertexMap(plateVtoBMap);
|
||||
Map<String, Integer> plateBtoVMap = invertVertexMap(plateVtoBMap);
|
||||
if(verbose){System.out.println("Vertex maps made");}
|
||||
|
||||
//make adjacency matrix for bipartite graph generator
|
||||
@@ -81,9 +108,9 @@ public class Simulator implements GraphModificationFunctions {
|
||||
//for a bipartite graph, and all the SimpleWeightedBipartiteGraphMatrixGenerator class expects.)
|
||||
if(verbose){System.out.println("Creating adjacency matrix");}
|
||||
//Count how many wells each alpha sequence appears in
|
||||
Map<Integer, Integer> alphaWellCounts = new HashMap<>();
|
||||
Map<String, Integer> alphaWellCounts = new HashMap<>();
|
||||
//count how many wells each beta sequence appears in
|
||||
Map<Integer, Integer> betaWellCounts = new HashMap<>();
|
||||
Map<String, Integer> betaWellCounts = new HashMap<>();
|
||||
//the adjacency matrix to be used by the graph generator
|
||||
double[][] weights = new double[plateVtoAMap.size()][plateVtoBMap.size()];
|
||||
countSequencesAndFillMatrix(samplePlate, allAlphas, allBetas, plateAtoVMap,
|
||||
@@ -101,8 +128,14 @@ public class Simulator implements GraphModificationFunctions {
|
||||
//List<Integer> alphaVertices = new ArrayList<>(plateVtoAMap.keySet()); //This will work because LinkedHashMap preserves order of entry
|
||||
List<Vertex> alphaVertices = new ArrayList<>();
|
||||
//start with map of all alphas mapped to vertex values, get occupancy from the alphaWellCounts map
|
||||
for (Integer seq : plateAtoVMap.keySet()) {
|
||||
Vertex alphaVertex = new Vertex(SequenceType.CDR3_ALPHA, seq, alphaWellCounts.get(seq), plateAtoVMap.get(seq));
|
||||
for (String seq : plateAtoVMap.keySet()) {
|
||||
Vertex alphaVertex;
|
||||
if (readDepth == 1) {
|
||||
alphaVertex = new Vertex(SequenceType.CDR3_ALPHA, seq, alphaWellCounts.get(seq), plateAtoVMap.get(seq));
|
||||
}
|
||||
else {
|
||||
alphaVertex = new Vertex(SequenceType.CDR3_ALPHA, seq, alphaWellCounts.get(seq), plateAtoVMap.get(seq), alphaReadCounts.get(seq));
|
||||
}
|
||||
alphaVertices.add(alphaVertex);
|
||||
}
|
||||
//Sort to make sure the order of vertices in list matches the order of the adjacency matrix
|
||||
@@ -112,8 +145,14 @@ public class Simulator implements GraphModificationFunctions {
|
||||
//the list of beta vertices
|
||||
//List<Integer> betaVertices = new ArrayList<>(plateVtoBMap.keySet());//This will work because LinkedHashMap preserves order of entry
|
||||
List<Vertex> betaVertices = new ArrayList<>();
|
||||
for (Integer seq : plateBtoVMap.keySet()) {
|
||||
Vertex betaVertex = new Vertex(SequenceType.CDR3_BETA, seq, betaWellCounts.get(seq), plateBtoVMap.get(seq));
|
||||
for (String seq : plateBtoVMap.keySet()) {
|
||||
Vertex betaVertex;
|
||||
if (readDepth == 1) {
|
||||
betaVertex = new Vertex(SequenceType.CDR3_BETA, seq, betaWellCounts.get(seq), plateBtoVMap.get(seq));
|
||||
}
|
||||
else {
|
||||
betaVertex = new Vertex(SequenceType.CDR3_BETA, seq, betaWellCounts.get(seq), plateBtoVMap.get(seq), betaReadCounts.get(seq));
|
||||
}
|
||||
betaVertices.add(betaVertex);
|
||||
}
|
||||
//Sort to make sure the order of vertices in list matches the order of the adjacency matrix
|
||||
@@ -129,7 +168,8 @@ public class Simulator implements GraphModificationFunctions {
|
||||
Duration time = Duration.between(start, stop);
|
||||
|
||||
//create GraphWithMapData object
|
||||
GraphWithMapData output = new GraphWithMapData(graph, numWells, samplePlate.getPopulations(), distCellsMapAlphaKey, alphaCount, betaCount, time);
|
||||
GraphWithMapData output = new GraphWithMapData(graph, numWells, samplePlate.getPopulations(), distCellsMapAlphaKey,
|
||||
alphaCount, betaCount, readDepth, readErrorRate, errorCollisionRate, time);
|
||||
//Set source file name in graph to name of sample plate
|
||||
output.setSourceFilename(samplePlate.getFilename());
|
||||
//return GraphWithMapData object
|
||||
@@ -147,7 +187,7 @@ public class Simulator implements GraphModificationFunctions {
|
||||
int numWells = data.getNumWells();
|
||||
//Integer alphaCount = data.getAlphaCount();
|
||||
//Integer betaCount = data.getBetaCount();
|
||||
Map<Integer, Integer> distCellsMapAlphaKey = data.getDistCellsMapAlphaKey();
|
||||
Map<String, String> distCellsMapAlphaKey = data.getDistCellsMapAlphaKey();
|
||||
Set<Vertex> alphas = new HashSet<>();
|
||||
Set<Vertex> betas = new HashSet<>();
|
||||
for(Vertex v: graph.vertexSet()) {
|
||||
@@ -228,7 +268,7 @@ public class Simulator implements GraphModificationFunctions {
|
||||
int trueCount = 0;
|
||||
int falseCount = 0;
|
||||
boolean check;
|
||||
Map<Integer, Integer> matchMap = new HashMap<>();
|
||||
Map<String, String> matchMap = new HashMap<>();
|
||||
while(weightIter.hasNext()) {
|
||||
e = weightIter.next();
|
||||
Vertex source = graph.getEdgeSource(e);
|
||||
@@ -291,31 +331,44 @@ public class Simulator implements GraphModificationFunctions {
|
||||
populationsStringBuilder.append(wellPopulations[i].toString());
|
||||
}
|
||||
String wellPopulationsString = populationsStringBuilder.toString();
|
||||
//graph generation time
|
||||
Duration graphTime = data.getTime();
|
||||
//MWM run time
|
||||
Duration pairingTime = Duration.between(start, stop);
|
||||
//total simulation time
|
||||
Duration time = Duration.between(start, stop);
|
||||
time = time.plus(data.getTime());
|
||||
Duration totalTime = graphTime.plus(pairingTime);
|
||||
|
||||
|
||||
Map<String, String> metadata = new LinkedHashMap<>();
|
||||
metadata.put("sample plate filename", data.getSourceFilename());
|
||||
metadata.put("graph filename", dataFilename);
|
||||
metadata.put("algorithm type", algoType);
|
||||
metadata.put("MWM algorithm type", algoType);
|
||||
metadata.put("matching weight", totalMatchingWeight.toString());
|
||||
metadata.put("well populations", wellPopulationsString);
|
||||
metadata.put("total alphas on plate", data.getAlphaCount().toString());
|
||||
metadata.put("total betas on plate", data.getBetaCount().toString());
|
||||
metadata.put("sequence read depth", data.getReadDepth().toString());
|
||||
metadata.put("sequence read error rate", data.getReadErrorRate().toString());
|
||||
metadata.put("read error collision rate", data.getErrorCollisionRate().toString());
|
||||
metadata.put("total alphas read from plate", data.getAlphaCount().toString());
|
||||
metadata.put("total betas read from plate", data.getBetaCount().toString());
|
||||
//HARD CODED, PARAMETERIZE LATER
|
||||
metadata.put("pre-filter sequences present in all wells", "true");
|
||||
//HARD CODED, PARAMETERIZE LATER
|
||||
metadata.put("pre-filter sequences based on occupancy/read count discrepancy", "true");
|
||||
metadata.put("alphas in graph (after pre-filtering)", graphAlphaCount.toString());
|
||||
metadata.put("betas in graph (after pre-filtering)", graphBetaCount.toString());
|
||||
metadata.put("high overlap threshold", highThreshold.toString());
|
||||
metadata.put("low overlap threshold", lowThreshold.toString());
|
||||
metadata.put("minimum overlap percent", minOverlapPercent.toString());
|
||||
metadata.put("maximum occupancy difference", maxOccupancyDifference.toString());
|
||||
metadata.put("high overlap threshold for pairing", highThreshold.toString());
|
||||
metadata.put("low overlap threshold for pairing", lowThreshold.toString());
|
||||
metadata.put("minimum overlap percent for pairing", minOverlapPercent.toString());
|
||||
metadata.put("maximum occupancy difference for pairing", maxOccupancyDifference.toString());
|
||||
metadata.put("pairing attempt rate", attemptRateTrunc.toString());
|
||||
metadata.put("correct pairing count", Integer.toString(trueCount));
|
||||
metadata.put("incorrect pairing count", Integer.toString(falseCount));
|
||||
metadata.put("pairing error rate", pairingErrorRateTrunc.toString());
|
||||
metadata.put("simulation time (seconds)", nf.format(time.toSeconds()));
|
||||
metadata.put("time to generate graph (seconds)", nf.format(graphTime.toSeconds()));
|
||||
metadata.put("time to pair sequences (seconds)",nf.format(pairingTime.toSeconds()));
|
||||
metadata.put("total simulation time (seconds)", nf.format(totalTime.toSeconds()));
|
||||
//create MatchingResult object
|
||||
MatchingResult output = new MatchingResult(metadata, header, allResults, matchMap, time);
|
||||
MatchingResult output = new MatchingResult(metadata, header, allResults, matchMap);
|
||||
if(verbose){
|
||||
for(String s: output.getComments()){
|
||||
System.out.println(s);
|
||||
@@ -637,50 +690,67 @@ public class Simulator implements GraphModificationFunctions {
|
||||
// }
|
||||
|
||||
//Remove sequences based on occupancy
|
||||
public static void filterByOccupancyThresholds(Map<Integer, Integer> wellMap, int low, int high){
|
||||
List<Integer> noise = new ArrayList<>();
|
||||
for(Integer k: wellMap.keySet()){
|
||||
public static void filterByOccupancyThresholds(Map<String, Integer> wellMap, int low, int high){
|
||||
List<String> noise = new ArrayList<>();
|
||||
for(String k: wellMap.keySet()){
|
||||
if((wellMap.get(k) > high) || (wellMap.get(k) < low)){
|
||||
noise.add(k);
|
||||
}
|
||||
}
|
||||
for(Integer k: noise) {
|
||||
for(String k: noise) {
|
||||
wellMap.remove(k);
|
||||
}
|
||||
}
|
||||
|
||||
public static void filterByOccupancyAndReadCount(Map<String, Integer> sequences,
|
||||
Map<String, Integer> sequenceReadCounts, int readDepth) {
|
||||
List<String> noise = new ArrayList<>();
|
||||
for(String k : sequences.keySet()){
|
||||
//occupancy times read depth should be more than half the sequence read count if the read error rate is low
|
||||
Integer threshold = (sequences.get(k) * readDepth) / 2;
|
||||
if(sequenceReadCounts.get(k) < threshold) {
|
||||
noise.add(k);
|
||||
}
|
||||
}
|
||||
for(String k : noise) {
|
||||
sequences.remove(k);
|
||||
}
|
||||
}
|
||||
|
||||
//Counts the well occupancy of the row peptides and column peptides into given maps, and
|
||||
//fills weights in the given 2D array
|
||||
private static void countSequencesAndFillMatrix(Plate samplePlate,
|
||||
Map<Integer,Integer> allRowSequences,
|
||||
Map<Integer,Integer> allColumnSequences,
|
||||
Map<Integer,Integer> rowSequenceToVertexMap,
|
||||
Map<Integer,Integer> columnSequenceToVertexMap,
|
||||
Map<String, Integer> allRowSequences,
|
||||
Map<String, Integer> allColumnSequences,
|
||||
Map<String, Integer> rowSequenceToVertexMap,
|
||||
Map<String, Integer> columnSequenceToVertexMap,
|
||||
int[] rowSequenceIndices,
|
||||
int[] colSequenceIndices,
|
||||
Map<Integer, Integer> rowSequenceCounts,
|
||||
Map<Integer,Integer> columnSequenceCounts,
|
||||
Map<String, Integer> rowSequenceCounts,
|
||||
Map<String, Integer> columnSequenceCounts,
|
||||
double[][] weights){
|
||||
Map<Integer, Integer> wellNRowSequences = null;
|
||||
Map<Integer, Integer> wellNColumnSequences = null;
|
||||
Map<String, Integer> wellNRowSequences;
|
||||
Map<String, Integer> wellNColumnSequences;
|
||||
int vertexStartValue = rowSequenceToVertexMap.size();
|
||||
int numWells = samplePlate.getSize();
|
||||
for (int n = 0; n < numWells; n++) {
|
||||
wellNRowSequences = samplePlate.assayWellsSequenceS(n, rowSequenceIndices);
|
||||
for (Integer a : wellNRowSequences.keySet()) {
|
||||
wellNRowSequences = new HashMap<>();
|
||||
samplePlate.assayWellsSequenceS(wellNRowSequences, n, rowSequenceIndices);
|
||||
for (String a : wellNRowSequences.keySet()) {
|
||||
if(allRowSequences.containsKey(a)){
|
||||
rowSequenceCounts.merge(a, 1, (oldValue, newValue) -> oldValue + newValue);
|
||||
}
|
||||
}
|
||||
wellNColumnSequences = samplePlate.assayWellsSequenceS(n, colSequenceIndices);
|
||||
for (Integer b : wellNColumnSequences.keySet()) {
|
||||
wellNColumnSequences = new HashMap<>();
|
||||
samplePlate.assayWellsSequenceS(wellNColumnSequences, n, colSequenceIndices);
|
||||
for (String b : wellNColumnSequences.keySet()) {
|
||||
if(allColumnSequences.containsKey(b)){
|
||||
columnSequenceCounts.merge(b, 1, (oldValue, newValue) -> oldValue + newValue);
|
||||
}
|
||||
}
|
||||
for (Integer i : wellNRowSequences.keySet()) {
|
||||
for (String i : wellNRowSequences.keySet()) {
|
||||
if(allRowSequences.containsKey(i)){
|
||||
for (Integer j : wellNColumnSequences.keySet()) {
|
||||
for (String j : wellNColumnSequences.keySet()) {
|
||||
if(allColumnSequences.containsKey(j)){
|
||||
weights[rowSequenceToVertexMap.get(i)][columnSequenceToVertexMap.get(j) - vertexStartValue] += 1.0;
|
||||
}
|
||||
@@ -691,27 +761,27 @@ public class Simulator implements GraphModificationFunctions {
|
||||
}
|
||||
}
|
||||
|
||||
private static Map<Integer, Integer> makeSequenceToSequenceMap(List<Integer[]> cells, int keySequenceIndex,
|
||||
int valueSequenceIndex){
|
||||
Map<Integer, Integer> keySequenceToValueSequenceMap = new HashMap<>();
|
||||
for (Integer[] cell : cells) {
|
||||
private static Map<String, String> makeSequenceToSequenceMap(List<String[]> cells, int keySequenceIndex,
|
||||
int valueSequenceIndex){
|
||||
Map<String, String> keySequenceToValueSequenceMap = new HashMap<>();
|
||||
for (String[] cell : cells) {
|
||||
keySequenceToValueSequenceMap.put(cell[keySequenceIndex], cell[valueSequenceIndex]);
|
||||
}
|
||||
return keySequenceToValueSequenceMap;
|
||||
}
|
||||
|
||||
private static Map<Integer, Integer> makeVertexToSequenceMap(Map<Integer, Integer> sequences, Integer startValue) {
|
||||
Map<Integer, Integer> map = new LinkedHashMap<>(); //LinkedHashMap to preserve order of entry
|
||||
private static Map<Integer, String> makeVertexToSequenceMap(Map<String, Integer> sequences, Integer startValue) {
|
||||
Map<Integer, String> map = new LinkedHashMap<>(); //LinkedHashMap to preserve order of entry
|
||||
Integer index = startValue; //is this necessary? I don't think I use this.
|
||||
for (Integer k: sequences.keySet()) {
|
||||
for (String k: sequences.keySet()) {
|
||||
map.put(index, k);
|
||||
index++;
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
private static Map<Integer, Integer> invertVertexMap(Map<Integer, Integer> map) {
|
||||
Map<Integer, Integer> inverse = new HashMap<>();
|
||||
private static Map<String, Integer> invertVertexMap(Map<Integer, String> map) {
|
||||
Map<String, Integer> inverse = new HashMap<>();
|
||||
for (Integer k : map.keySet()) {
|
||||
inverse.put(map.get(k), k);
|
||||
}
|
||||
|
||||
@@ -1,10 +1,15 @@
|
||||
import org.jheaps.AddressableHeap;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
public class Vertex implements Serializable, Comparable<Vertex> {
|
||||
private SequenceType type;
|
||||
private Integer vertexLabel;
|
||||
private Integer sequence;
|
||||
private String sequence;
|
||||
private Integer occupancy;
|
||||
private Integer readCount;
|
||||
private Double potential;
|
||||
private AddressableHeap queue;
|
||||
|
||||
public Vertex(Integer vertexLabel) {
|
||||
this.vertexLabel = vertexLabel;
|
||||
@@ -13,11 +18,20 @@ public class Vertex implements Serializable, Comparable<Vertex> {
|
||||
this.vertexLabel = Integer.parseInt((vertexLabel));
|
||||
}
|
||||
|
||||
public Vertex(SequenceType type, Integer sequence, Integer occupancy, Integer vertexLabel) {
|
||||
public Vertex(SequenceType type, String sequence, Integer occupancy, Integer vertexLabel) {
|
||||
this.type = type;
|
||||
this.vertexLabel = vertexLabel;
|
||||
this.sequence = sequence;
|
||||
this.occupancy = occupancy;
|
||||
this.readCount = 1;
|
||||
}
|
||||
|
||||
public Vertex(SequenceType type, String sequence, Integer occupancy, Integer vertexLabel, Integer readCount) {
|
||||
this.type = type;
|
||||
this.vertexLabel = vertexLabel;
|
||||
this.sequence = sequence;
|
||||
this.occupancy = occupancy;
|
||||
this.readCount = readCount;
|
||||
}
|
||||
|
||||
|
||||
@@ -37,13 +51,12 @@ public class Vertex implements Serializable, Comparable<Vertex> {
|
||||
this.vertexLabel = Integer.parseInt(label);
|
||||
}
|
||||
|
||||
public Integer getSequence() {
|
||||
|
||||
public String getSequence() {
|
||||
return sequence;
|
||||
}
|
||||
|
||||
public void setSequence(String sequence) {
|
||||
this.sequence = Integer.parseInt(sequence);
|
||||
this.sequence = sequence;
|
||||
}
|
||||
|
||||
public Integer getOccupancy() {
|
||||
@@ -94,4 +107,11 @@ public class Vertex implements Serializable, Comparable<Vertex> {
|
||||
return this.vertexLabel - other.getVertexLabel();
|
||||
}
|
||||
|
||||
public Integer getReadCount() {
|
||||
return readCount;
|
||||
}
|
||||
|
||||
public void setReadCount(Integer readCount) {
|
||||
this.readCount = readCount;
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user