Compare commits
162 Commits
a5db89cb0b...v4.0
readme.md (227)
@@ -12,7 +12,7 @@ Unlike pairSEQ, which calculates p-values for every TCR alpha/beta overlap and c
 against a null distribution, BiGpairSEQ does not do any statistical calculations
 directly.
 
-BiGpairSEQ creates a [simple bipartite weighted graph](https://en.wikipedia.org/wiki/Bipartite_graph) representing the sample plate.
+BiGpairSEQ creates a [weighted bipartite graph](https://en.wikipedia.org/wiki/Bipartite_graph) representing the sample plate.
 The distinct TCRA and TCRB sequences form the two sets of vertices. Every TCRA/TCRB pair that share a well
 are connected by an edge, with the edge weight set to the number of wells in which both sequences appear.
 (Sequences present in *all* wells are filtered out prior to creating the graph, as there is no signal in their occupancy pattern.)
@@ -20,8 +20,8 @@ The problem of pairing TCRA/TCRB sequences thus reduces to the "assignment probl
 matching on a bipartite graph--the subset of vertex-disjoint edges whose weights sum to the maximum possible value.
 
 This is a well-studied combinatorial optimization problem, with many known solutions.
-The most efficient algorithm known to the author for maximum weight matching of a bipartite graph with strictly integral weights
-is from Duan and Su (2012). For a graph with m edges, n vertices per side, and maximum integer edge weight N,
+The most efficient algorithm known to the author for maximum weight matching of a bipartite graph with strictly integral
+weights is from Duan and Su (2012). For a graph with m edges, n vertices per side, and maximum integer edge weight N,
 their algorithm runs in **O(m sqrt(n) log(N))** time. As the graph representation of a pairSEQ experiment is
 bipartite with integer weights, this algorithm is ideal for BiGpairSEQ.
 
@@ -29,17 +29,13 @@ Unfortunately, it's a fairly new algorithm, and not yet implemented by the graph
 So this program instead uses the Fibonacci heap-based algorithm of Fredman and Tarjan (1987), which has a worst-case
 runtime of **O(n (n log(n) + m))**. The algorithm is implemented as described in Melhorn and Näher (1999).
 
-The current version of the program uses a pairing heap instead of a Fibonacci heap for its priority queue,
-which has lower theoretical efficiency but also lower complexity overhead, and is often equivalently performant
-in practice.
-
 ## USAGE
 
 ### RUNNING THE PROGRAM
 
 [Download the current version of BiGpairSEQ_Sim.](https://gitea.ejsf.synology.me/efischer/BiGpairSEQ/releases)
 
-BiGpairSEQ_Sim is an executable .jar file. Requires Java 11 or higher. [OpenJDK 17](https://jdk.java.net/17/)
+BiGpairSEQ_Sim is an executable .jar file. Requires Java 14 or higher. [OpenJDK 17](https://jdk.java.net/17/)
 recommended.
 
 Run with the command:
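The matching step described in the hunk above reduces to maximum weight matching on the bipartite overlap graph. As a point of reference only, here is a minimal sketch of that computation with JGraphT (one of the libraries credited in the readme's citations); the vertex labels and edge weights are invented for illustration, and this is not the project's own matching code.

```java
import org.jgrapht.Graph;
import org.jgrapht.alg.interfaces.MatchingAlgorithm;
import org.jgrapht.alg.matching.MaximumWeightBipartiteMatching;
import org.jgrapht.graph.DefaultWeightedEdge;
import org.jgrapht.graph.SimpleWeightedGraph;

import java.util.Set;

public class MatchingSketch {
    public static void main(String[] args) {
        // Alpha sequences on one side, beta sequences on the other (made-up labels).
        Graph<String, DefaultWeightedEdge> g = new SimpleWeightedGraph<>(DefaultWeightedEdge.class);
        Set<String> alphas = Set.of("A1", "A2");
        Set<String> betas = Set.of("B1", "B2");
        alphas.forEach(g::addVertex);
        betas.forEach(g::addVertex);

        // Edge weight = number of wells in which both sequences appear.
        g.setEdgeWeight(g.addEdge("A1", "B1"), 40);
        g.setEdgeWeight(g.addEdge("A1", "B2"), 3);
        g.setEdgeWeight(g.addEdge("A2", "B2"), 37);

        // JGraphT's bipartite implementation follows Mehlhorn & Näher and takes its
        // priority queue from JHeaps, matching the libraries listed in the citations.
        MatchingAlgorithm.Matching<String, DefaultWeightedEdge> matching =
                new MaximumWeightBipartiteMatching<>(g, alphas, betas).getMatching();

        matching.getEdges().forEach(e ->
                System.out.println(g.getEdgeSource(e) + " -- " + g.getEdgeTarget(e)));
        System.out.println("total weight: " + matching.getWeight());
    }
}
```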
@@ -47,39 +43,75 @@ Run with the command:
 `java -jar BiGpairSEQ_Sim.jar`
 
 Processing sample plates with tens of thousands of sequences may require large amounts
-of RAM. It is often desirable to increase the JVM maximum heap allocation with the -Xmx flag.
+of RAM. It is often desirable to increase the JVM maximum heap allocation with the `-Xmx` flag.
 For example, to run the program with 32 gigabytes of memory, use the command:
 
 `java -Xmx32G -jar BiGpairSEQ_Sim.jar`
 
-Once running, BiGpairSEQ_Sim has an interactive, menu-driven CLI for generating files and simulating TCR pairing. The
-main menu looks like this:
+There are a number of command line options, to allow the program to be used in shell scripts. For a full list,
+use the `-help` flag:
 
+`java -jar BiGpairSEQ_Sim.jar -help`
+
+If no command line arguments are given, BiGpairSEQ_Sim will launch with an interactive, menu-driven CLI for
+generating files and simulating TCR pairing. The main menu looks like this:
+
 ```
 --------BiGPairSEQ SIMULATOR--------
 ALPHA/BETA T CELL RECEPTOR MATCHING
 USING WEIGHTED BIPARTITE GRAPHS
 ------------------------------------
 Please select an option:
 1) Generate a population of distinct cells
 2) Generate a sample plate of T cells
 3) Generate CDR3 alpha/beta occupancy data and overlap graph
 4) Simulate bipartite graph CDR3 alpha/beta matching (BiGpairSEQ)
+8) Options
 9) About/Acknowledgments
 0) Exit
 ```
 
-### OUTPUT
+By default, the Options menu looks like this:
+
+```
+--------------OPTIONS---------------
+1) Turn on cell sample file caching
+2) Turn on plate file caching
+3) Turn on graph/data file caching
+4) Turn off serialized binary graph output
+5) Turn on GraphML graph output
+6) Maximum weight matching algorithm options
+0) Return to main menu
+```
+
+
+### INPUT/OUTPUT
+
 To run the simulation, the program reads and writes 4 kinds of files:
 * Cell Sample files in CSV format
 * Sample Plate files in CSV format
-* Graph and Data files in binary object serialization format
+* Graph/Data files in binary object serialization format
 * Matching Results files in CSV format
 
-When entering filenames, it is not necessary to include the file extension (.csv or .ser). When reading or
-writing files, the program will automatically add the correct extension to any filename without one.
+These files are often generated in sequence. When entering filenames, it is not necessary to include the file extension
+(.csv or .ser). When reading or writing files, the program will automatically add the correct extension to any filename
+without one.
+
+To save file I/O time, the most recent instance of each of these four
+files either generated or read from disk can be cached in program memory. When caching is active, subsequent uses of the
+same data file won't need to be read in again until another file of that type is used or generated,
+or caching is turned off for that file type. The program checks whether it needs to update its cached data by comparing
+filenames as entered by the user. On encountering a new filename, the program flushes its cache and reads in the new file.
+
+(Note that cached Graph/Data files must be transformed back into their original state after a matching experiment, which
+may take some time. Whether file I/O or graph transformation takes longer for graph/data files is likely to be
+device-specific.)
+
+The program's caching behavior can be controlled in the Options menu. By default, all caching is OFF.
+
+The program can optionally output Graph/Data files in GraphML format (.graphml) for data portability. This can be
+turned on in the Options menu. By default, GraphML output is OFF.
+
+---
 #### Cell Sample Files
 Cell Sample files consist of any number of distinct "T cells." Every cell contains
 four sequences: Alpha CDR3, Beta CDR3, Alpha CDR1, Beta CDR1. The sequences are represented by
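The filename handling described above (the correct .csv or .ser extension is appended when the user leaves it off) amounts to a one-line check; `ensureExtension` below is a hypothetical helper name used only for illustration, not necessarily how BiGpairSEQ_Sim implements it.

```java
// Hypothetical helper illustrating the extension handling described above.
public class FileNameUtil {
    static String ensureExtension(String filename, String extension) {
        // ensureExtension("4MilCellsPlate", ".csv")     -> "4MilCellsPlate.csv"
        // ensureExtension("4MilCellsPlate.csv", ".csv") -> unchanged
        return filename.endsWith(extension) ? filename : filename + extension;
    }
}
```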
@@ -97,7 +129,6 @@ Comments are preceded by `#`
 
 Structure:
 
----
 # Sample contains 1 unique CDR1 for every 4 unique CDR3s.
 | Alpha CDR3 | Beta CDR3 | Alpha CDR1 | Beta CDR1 |
 |---|---|---|---|
@@ -121,15 +152,18 @@ Options when making a Sample Plate file:
 * Standard deviation size
 * Exponential
 * Lambda value
-* (Based on the slope of the graph in Figure 4C of the pairSEQ paper, the distribution of the original experiment was exponential with a lambda of approximately 0.6. (Howie, et al. 2015))
+* *(Based on the slope of the graph in Figure 4C of the pairSEQ paper, the distribution of the original experiment was approximately exponential with a lambda ~0.6. (Howie, et al. 2015))*
 * Total number of wells on the plate
-* Number of sections on plate
-* Number of T cells per well
-* per section, if more than one section
+* Well populations random or fixed
+* If random, minimum and maximum population sizes
+* If fixed
+* Number of sections on plate
+* Number of T cells per well
+* per section, if more than one section
 * Dropout rate
 
 Files are in CSV format. There are no header labels. Every row represents a well.
-Every column represents an individual cell, containing four sequences, depicted as an array string:
+Every value represents an individual cell, containing four sequences, depicted as an array string:
 `[CDR3A, CDR3B, CDR1A, CDR1B]`. So a representative cell might look like this:
 
 `[525902, 791533, -1, 866282]`
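For the exponential frequency option above (lambda ~0.6 in the original pairSEQ experiment), a draw can be made by inverse-transform sampling. The sketch below is illustrative only; the mapping from the exponential draw to a cell index is an assumption, not the simulator's actual scheme.

```java
import java.util.Random;

// Illustrative only: inverse-transform draw from an exponential distribution.
public class ExponentialDrawSketch {
    static int sampleCellIndex(Random rand, int numDistinctCells, double lambda) {
        double x = -Math.log(1.0 - rand.nextDouble()) / lambda;  // x ~ Exp(lambda)
        // How a draw maps onto the list of distinct cells is up to the simulator;
        // clamping to the sample size here is only a placeholder.
        return (int) Math.min(Math.floor(x), numDistinctCells - 1);
    }
}
```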
@@ -139,7 +173,6 @@ Dropout sequences are replaced with the value `-1`. Comments are preceded by `#`
 
 Structure:
 
----
 ```
 # Cell source file name:
 # Each row represents one well on the plate
@@ -155,25 +188,41 @@ Structure:
 
 ---
 
-#### Graph and Data Files
-Graph and Data files are serialized binaries of a Java object containing the weigthed bipartite graph representation of a
+#### Graph/Data Files
+Graph/Data files are serialized binaries of a Java object containing the weigthed bipartite graph representation of a
 Sample Plate, along with the necessary metadata for matching and results output. Making them requires a Cell Sample file
 (to construct a list of correct sequence pairs for checking the accuracy of BiGpairSEQ simulations) and a
-Sample Plate file (to construct the associated occupancy graph). These files can be several gigabytes in size.
-Writing them to a file lets us generate a graph and its metadata once, then use it for multiple different BiGpairSEQ simulations.
+Sample Plate file (to construct the associated occupancy graph).
 
-Options for creating a Graph and Data file:
+These files can be several gigabytes in size. Writing them to a file lets us generate a graph and its metadata once,
+then use it for multiple different BiGpairSEQ simulations.
+
+Options for creating a Graph/Data file:
 * The Cell Sample file to use
 * The Sample Plate file to use. (This must have been generated from the selected Cell Sample file.)
+* Whether to simulate sequence read depth. If simulated:
+* The read depth (number of times each sequence is read)
+* The read error rate (probability a sequence is misread)
+* The error collision rate (probability two misreads produce the same spurious sequence)
 
-These files do not have a human-readable structure, and are not portable to other programs. (Export of graphs in a
-portable data format may be implemented in the future. The tricky part is encoding the necessary metadata.)
+These files do not have a human-readable structure, and are not portable to other programs.
+*Optional GraphML output*
+
+For portability of graph data to other software, turn on [GraphML](http://graphml.graphdrawing.org/index.html) output
+in the Options menu in interactive mode, or use the `-graphml`command line argument. This will produce a .graphml file
+for the weighted graph, with vertex attributes for sequence, type, and occupancy data. This graph contains all the data
+necessary for the BiGpairSEQ matching algorithm. It does not include the data to measure pairing accuracy; for that,
+compare the matching results to the original Cell Sample .csv file.
 
 ---
 
 #### Matching Results Files
-Matching results files consist of the results of a BiGpairSEQ matching simulation.
-Files are in CSV format. Rows are sequence pairings with extra relevant data. Columns are pairing-specific details.
+Matching results files consist of the results of a BiGpairSEQ matching simulation. Making them requires a serialized
+binary Graph/Data file (.ser). (Because .graphML files are larger than .ser files, BiGpairSEQ_Sim supports .graphML
+output only. Graph/data input must use a serialized binary.)
+
+Matching results files are in CSV format. Rows are sequence pairings with extra relevant data. Columns are pairing-specific details.
 Metadata about the matching simulation is included as comments. Comments are preceded by `#`.
 
 Options when running a BiGpairSEQ simulation of CDR3 alpha/beta matching:
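For the GraphML output described in this hunk, an export of the weighted graph could look roughly like the sketch below, assuming the jgrapht-nio GraphMLExporter API; the SeqVertex class and the attribute wiring are assumptions for illustration, not the project's own exporter.

```java
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultWeightedEdge;
import org.jgrapht.nio.AttributeType;
import org.jgrapht.nio.DefaultAttribute;
import org.jgrapht.nio.graphml.GraphMLExporter;

import java.io.IOException;
import java.io.Writer;
import java.util.Map;

public class GraphMLSketch {

    // Stand-in vertex carrying the attributes mentioned above; the project's own
    // Vertex class is not part of this diff, so this class is an assumption.
    public static class SeqVertex {
        final String sequence; final String type; final int occupancy;
        SeqVertex(String sequence, String type, int occupancy) {
            this.sequence = sequence; this.type = type; this.occupancy = occupancy;
        }
    }

    public static void export(Graph<SeqVertex, DefaultWeightedEdge> graph, Writer out) throws IOException {
        GraphMLExporter<SeqVertex, DefaultWeightedEdge> exporter = new GraphMLExporter<>();
        exporter.setExportEdgeWeights(true); // well-overlap counts exported as edge weights
        exporter.registerAttribute("sequence", GraphMLExporter.AttributeCategory.NODE, AttributeType.STRING);
        exporter.registerAttribute("type", GraphMLExporter.AttributeCategory.NODE, AttributeType.STRING);
        exporter.registerAttribute("occupancy", GraphMLExporter.AttributeCategory.NODE, AttributeType.INT);
        exporter.setVertexAttributeProvider(v -> Map.of(
                "sequence", DefaultAttribute.createAttribute(v.sequence),
                "type", DefaultAttribute.createAttribute(v.type),
                "occupancy", DefaultAttribute.createAttribute(v.occupancy)));
        exporter.exportGraph(graph, out);
    }
}
```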
@@ -188,7 +237,6 @@ Options when running a BiGpairSEQ simulation of CDR3 alpha/beta matching:
 
 Example output:
 
----
 ```
 # Source Sample Plate file: 4MilCellsPlate.csv
 # Source Graph and Data file: 4MilCellsPlateGraph.ser
@@ -220,46 +268,117 @@ Example output:
 P-values are calculated *after* BiGpairSEQ matching is completed, for purposes of comparison only,
 using the (2021 corrected) formula from the original pairSEQ paper. (Howie, et al. 2015)
 
-### PERFORMANCE
-Performance details of the example excerpted above:
+## PERFORMANCE (old results; need updating to reflect current, improved simulator performance)
 
 On a home computer with a Ryzen 5600X CPU, 64GB of 3200MHz DDR4 RAM (half of which was allocated to the Java Virtual Machine), and a PCIe 3.0 SSD, running Linux Mint 20.3 Edge (5.13 kernel),
 the author ran a BiGpairSEQ simulation of a 96-well sample plate with 30,000 T cells/well comprising ~11,800 alphas and betas,
-taken from a sample of 4,000,000 distinct cells with an exponential frequency distribution.
+taken from a sample of 4,000,000 distinct cells with an exponential frequency distribution (lambda 0.6).
 
 With min/max occupancy threshold of 3 and 94 wells for matching, and no other pre-filtering, BiGpairSEQ identified 5,151
 correct pairings and 18 incorrect pairings, for an accuracy of 99.652%.
 
-The simulation time was 14'22". If intermediate results were held in memory, this would be equivalent to the total elapsed time.
+The total simulation time was 14'22". If intermediate results were held in memory, this would be equivalent to the total elapsed time.
 
 Since this implementation of BiGpairSEQ writes intermediate results to disk (to improve the efficiency of *repeated* simulations
 with different filtering options), the actual elapsed time was greater. File I/O time was not measured, but took
 slightly less time than the simulation itself. Real elapsed time from start to finish was under 30 minutes.
 
+As mentioned in the theory section, performance could be improved by implementing a more efficient algorithm for finding
+the maximum weight matching.
+
+## BEHAVIOR WITH RANDOMIZED WELL POPULATIONS
+
+A series of BiGpairSEQ simulations were conducted using a cell sample file of 3.5 million unique T cells. From these cells,
+10 sample plate files were created. All of these sample plates had 96 wells, used an exponential distribution with a lambda of 0.6, and
+had a sequence dropout rate of 10%.
+
+The well populations of the plates were:
+* One sample plate with 1000 T cells/well
+* One sample plate with 2000 T cells/well
+* One sample plate with 3000 T cells/well
+* One sample plate with 4000 T cells/well
+* One sample plate with 5000 T cells/well
+* Five sample plates with each individual well's population randomized, from 1000 to 5000 T cells. (Average population ~3000 T cells/well.)
+
+All BiGpairSEQ simulations were run with a low overlap threshold of 3 and a high overlap threshold of 94.
+No optional filters were used, so pairing was attempted for all sequences with overlaps within the threshold values.
+
+Constant well population plate results:
+
+| |1000 Cell/Well Plate|2000 Cell/Well Plate|3000 Cell/Well Plate|4000 Cell/Well Plate|5000 Cell/Well Plate
+|---|---|---|---|---|---|
+|Total Alphas Found|6407|7330|7936|8278|8553|
+|Total Betas Found|6405|7333|7968|8269|8582|
+|Pairing Attempt Rate|0.661|0.653|0.600|0.579|0.559|
+|Correct Pairing Count|4231|4749|4723|4761|4750|
+|Incorrect Pairing Count|3|34|40|26|29|
+|Pairing Error Rate|0.000709|0.00711|0.00840|0.00543|0.00607|
+|Simulation Time (Seconds)|500|643|700|589|598|
+
+Randomized well population plate results:
+
+| |Random Plate 1 | Random Plate 2|Random Plate 3|Random Plate 4|Random Plate 5|Average|
+|---|---|---|---|---|---|---|
+Total Alphas Found|7853|7904|7964|7898|7917|7907|
+Total Betas Found|7851|7891|7920|7910|7894|7893|
+Pairing Attempt Rate|0.607|0.610|0.601|0.605|0.603|0.605|
+Correct Pairing Count|4718|4782|4721|4755|4731|4741|
+Incorrect Pairing Count|51|35|42|27|29|37|
+Pairing Error Rate|0.0107|0.00727|0.00882|0.00565|0.00609|0.00771|
+Simulation Time (Seconds)|590|677|730|618|615|646|
+
+The average results for the randomized plates are closest to the constant plate with 3000 T cells/well.
+This and several other tests indicate that BiGpairSEQ treats a sample plate with a highly variable number of T cells/well
+roughly as though it had a constant well population equal to the plate's average well population.
+
 ## TODO
 
 * ~~Try invoking GC at end of workloads to reduce paging to disk~~ DONE
-* Hold graph data in memory until another graph is read-in? ~~ABANDONED~~ ~~UNABANDONED~~ DONE
+* ~~Hold graph data in memory until another graph is read-in? ABANDONED UNABANDONED~~ DONE
 * ~~*No, this won't work, because BiGpairSEQ simulations alter the underlying graph based on filtering constraints. Changes would cascade with multiple experiments.*~~
-* Might have figured out a way to do it, by taking edges out and then putting them back into the graph. This may actually be possible. If so, awesome.
-* See if there's a reasonable way to reformat Sample Plate files so that wells are columns instead of rows.
+* Might have figured out a way to do it, by taking edges out and then putting them back into the graph. This may actually be possible.
+* It is possible, though the modifications to the graph incur their own performance penalties. Need testing to see which option is best. It may be computer-specific.
+* ~~Test whether pairing heap (currently used) or Fibonacci heap is more efficient for priority queue in current matching algorithm~~ DONE
+* ~~in theory Fibonacci heap should be more efficient, but complexity overhead may eliminate theoretical advantage~~
+* ~~Add controllable heap-type parameter?~~
+* Parameter implemented. Fibonacci heap the current default.
+* ~~Implement sample plates with random numbers of T cells per well.~~ DONE
+* Possible BiGpairSEQ advantage over pairSEQ: BiGpairSEQ is resilient to variations in well population sizes on a sample plate; pairSEQ is not due to nature of probability calculations.
+* preliminary data suggests that BiGpairSEQ behaves roughly as though the whole plate had whatever the *average* well concentration is, but that's still speculative.
+* ~~See if there's a reasonable way to reformat Sample Plate files so that wells are columns instead of rows.~~
 * ~~Problem is variable number of cells in a well~~
 * ~~Apache Commons CSV library writes entries a row at a time~~
-* _Got this working, but at the cost of a profoundly strange bug in graph occupancy filtering. Have reverted the repo until I can figure out what caused that. Given how easily Thingiverse transposes CSV matrices in R, might not even be worth fixing._
-* Re-implement command line arguments, to enable scripting and statistical simulation studies
-* Implement sample plates with random numbers of T cells per well.
-* Possible BiGpairSEQ advantage over pairSEQ: BiGpairSEQ is resilient to variations in well population sizes on a sample plate; pairSEQ is not.
-* preliminary data suggests that BiGpairSEQ behaves roughly as though the whole plate had whatever the *average* well concentration is, but that's still speculative.
-* Enable GraphML output in addition to serialized object binaries, for data portability
-* Custom vertex type with attribute for sequence occupancy?
+* Got this working, but at the cost of a profoundly strange bug in graph occupancy filtering. Have reverted the repo until I can figure out what caused that. Given how easily Thingiverse transposes CSV matrices in R, might not even be worth fixing.
+* ~~Enable GraphML output in addition to serialized object binaries, for data portability~~ DONE
+* ~~Have a branch where this is implemented, but there's a bug that broke matching. Don't currently have time to fix.~~
+* ~~Re-implement command line arguments, to enable scripting and statistical simulation studies~~ DONE
+* ~~Implement custom Vertex class to simplify code and make it easier to implement different MWM algorithms~~ DONE
+* Advantage: would eliminate the need to use maps to associate vertices with sequences, which would make the code easier to understand.
+* This also seems to be faster when using the same algorithm than the version with lots of maps, which is a nice bonus!
+* ~~Implement simulation of read depth, and of read errors. Pre-filter graph for difference in read count to eliminate spurious sequences.~~ DONE
+* Pre-filtering based on comparing (read depth) * (occupancy) to (read count) for each sequence works extremely well
+* ~~Add read depth simulation options to CLI~~ DONE
+* ~~Update graphml output to reflect current Vertex class attributes~~ DONE
+* Individual well data from the SequenceRecords could be included, if there's ever a reason for it
+* Update matching metadata output options in CLI
+* Update performance data in this readme
 * Re-implement CDR1 matching method
+* Refactor simulator code to collect all needed data in a single scan of the plate
+* Currently it scans once for the vertices and then again for the edge weights. This made simulating read depth awkward, and incompatible with caching of plate files.
+* This would be a fairly major rewrite of the simulator code, but could make things faster, and would definitely make them cleaner.
 * Implement Duan and Su's maximum weight matching algorithm
 * Add controllable algorithm-type parameter?
-* Test whether pairing heap (currently used) or Fibonacci heap is more efficient for priority queue in current matching algorithm
-* in theory Fibonacci heap should be more efficient, but complexity overhead may eliminate theoretical advantage
-* Add controllable heap-type parameter?
+* This would be fun and valuable, but probably take more time than I have for a hobby project.
+* Implement an auction algorithm for maximum weight matching
+* Implement an algorithm for approximating a maximum weight matching
+* Some of these run in linear or near-linear time
+* given that the underlying biological samples have many, many sources of error, this would probably be the most useful option in practice. It seems less mathematically elegant, though, and so less fun for me.
+* Implement Vose's alias method for arbitrary statistical distributions of cells
+* Should probably refactor to use apache commons rng for this
+* Use commons JCS for caching
+* Parameterize pre-filtering. Currently, sequences present in all wells are filtered out before constructing the graph, which massively reduces graph size. But, ideally, no pre-filtering would be necessary.
 
 
 ## CITATIONS
 * Howie, B., Sherwood, A. M., et al. ["High-throughput pairing of T cell receptor alpha and beta sequences."](https://pubmed.ncbi.nlm.nih.gov/26290413/) Sci. Transl. Med. 7, 301ra131 (2015)
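One TODO item in the hunk above describes pre-filtering by comparing (read depth) × (occupancy) against the observed read count for each sequence. Below is a sketch of that check with hypothetical parameter names, since the project's SequenceRecord class is not part of this diff.

```java
// Illustrative only: the read-count pre-filter heuristic from the TODO list.
// The parameter names and the 'expectedFraction' threshold are assumptions.
public class ReadCountFilterSketch {
    static boolean looksSpurious(int readDepth, int occupancy, long observedReads, double expectedFraction) {
        long expectedReads = (long) readDepth * occupancy; // reads expected if the sequence were real
        return observedReads < expectedFraction * expectedReads;
    }
}
```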
@@ -271,7 +390,7 @@ slightly less time than the simulation itself. Real elapsed time from start to f
 * [JGraphT](https://jgrapht.org) -- Graph theory data structures and algorithms
 * [JHeaps](https://www.jheaps.org) -- For pairing heap priority queue used in maximum weight matching algorithm
 * [Apache Commons CSV](https://commons.apache.org/proper/commons-csv/) -- For CSV file output
-* [Apache Commons CLI](https://commons.apache.org/proper/commons-cli/) -- To enable command line arguments for scripting. (**Awaiting re-implementation**.)
+* [Apache Commons CLI](https://commons.apache.org/proper/commons-cli/) -- To enable command line arguments for scripting.
 
 ## ACKNOWLEDGEMENTS
 BiGpairSEQ was conceived in collaboration with Dr. Alice MacQueen, who brought the original
BiGpairSEQ.java
@@ -1,8 +1,22 @@
-//main class. Only job is to choose which interface to use, and hold graph data in memory
+import java.util.Random;
+
+//main class. For choosing interface type and holding settings
 public class BiGpairSEQ {
 
+    private static final Random rand = new Random();
+    private static CellSample cellSampleInMemory = null;
+    private static String cellFilename = null;
+    private static Plate plateInMemory = null;
+    private static String plateFilename = null;
     private static GraphWithMapData graphInMemory = null;
     private static String graphFilename = null;
+    private static boolean cacheCells = false;
+    private static boolean cachePlate = false;
+    private static boolean cacheGraph = false;
+    private static HeapType priorityQueueHeapType = HeapType.FIBONACCI;
+    private static boolean outputBinary = true;
+    private static boolean outputGraphML = false;
+    private static final String version = "version 3.0";
 
     public static void main(String[] args) {
         if (args.length == 0) {
@@ -10,33 +24,154 @@ public class BiGpairSEQ {
         }
         else {
             //This will be uncommented when command line arguments are re-implemented.
-            //CommandLineInterface.startCLI(args);
-            System.out.println("Command line arguments are still being re-implemented.");
+            CommandLineInterface.startCLI(args);
+            //System.out.println("Command line arguments are still being re-implemented.");
         }
     }
 
-    public static GraphWithMapData getGraph() {
-        return graphInMemory;
+    public static Random getRand() {
+        return rand;
     }
 
-    public static void setGraph(GraphWithMapData g) {
+    public static CellSample getCellSampleInMemory() {
+        return cellSampleInMemory;
+    }
+
+    public static void setCellSampleInMemory(CellSample cellSample, String filename) {
+        if(cellSampleInMemory != null) {
+            clearCellSampleInMemory();
+        }
+        cellSampleInMemory = cellSample;
+        cellFilename = filename;
+        System.out.println("Cell sample file " + filename + " cached.");
+    }
+
+    public static void clearCellSampleInMemory() {
+        cellSampleInMemory = null;
+        cellFilename = null;
+        System.gc();
+        System.out.println("Cell sample file cache cleared.");
+
+    }
+
+    public static String getCellFilename() {
+        return cellFilename;
+    }
+
+    public static Plate getPlateInMemory() {
+        return plateInMemory;
+    }
+
+    public static void setPlateInMemory(Plate plate, String filename) {
+        if(plateInMemory != null) {
+            clearPlateInMemory();
+        }
+        plateInMemory = plate;
+        plateFilename = filename;
+        System.out.println("Sample plate file " + filename + " cached.");
+    }
+
+    public static void clearPlateInMemory() {
+        plateInMemory = null;
+        plateFilename = null;
+        System.gc();
+        System.out.println("Sample plate file cache cleared.");
+
+    }
+
+    public static String getPlateFilename() {
+        return plateFilename;
+    }
+
+
+    public static GraphWithMapData getGraphInMemory() {return graphInMemory;
+    }
+
+    public static void setGraphInMemory(GraphWithMapData g, String filename) {
         if (graphInMemory != null) {
-            clearGraph();
+            clearGraphInMemory();
         }
         graphInMemory = g;
+        graphFilename = filename;
+        System.out.println("Graph and data file " + filename + " cached.");
     }
 
-    public static void clearGraph() {
+    public static void clearGraphInMemory() {
         graphInMemory = null;
+        graphFilename = null;
         System.gc();
+        System.out.println("Graph and data file cache cleared.");
     }
 
     public static String getGraphFilename() {
         return graphFilename;
     }
 
-    public static void setGraphFilename(String filename) {
-        graphFilename = filename;
+    public static boolean cacheCells() {
+        return cacheCells;
     }
 
+    public static void setCacheCells(boolean cacheCells) {
+        //if not caching, clear the memory
+        if(!cacheCells){
+            BiGpairSEQ.clearCellSampleInMemory();
+            System.out.println("Cell sample file caching: OFF.");
+        }
+        else {
+            System.out.println("Cell sample file caching: ON.");
+        }
+        BiGpairSEQ.cacheCells = cacheCells;
+    }
+
+    public static boolean cachePlate() {
+        return cachePlate;
+    }
+
+    public static void setCachePlate(boolean cachePlate) {
+        //if not caching, clear the memory
+        if(!cachePlate) {
+            BiGpairSEQ.clearPlateInMemory();
+            System.out.println("Sample plate file caching: OFF.");
+        }
+        else {
+            System.out.println("Sample plate file caching: ON.");
+        }
+        BiGpairSEQ.cachePlate = cachePlate;
+    }
+
+    public static boolean cacheGraph() {
+        return cacheGraph;
+    }
+
+    public static void setCacheGraph(boolean cacheGraph) {
+        //if not caching, clear the memory
+        if(!cacheGraph) {
+            BiGpairSEQ.clearGraphInMemory();
+            System.out.println("Graph/data file caching: OFF.");
+        }
+        else {
+            System.out.println("Graph/data file caching: ON.");
+        }
+        BiGpairSEQ.cacheGraph = cacheGraph;
+    }
+
+    public static String getPriorityQueueHeapType() {
+        return priorityQueueHeapType.name();
+    }
+
+    public static void setPairingHeap() {
+        priorityQueueHeapType = HeapType.PAIRING;
+    }
+
+    public static void setFibonacciHeap() {
+        priorityQueueHeapType = HeapType.FIBONACCI;
+    }
+
+    public static boolean outputBinary() {return outputBinary;}
+    public static void setOutputBinary(boolean b) {outputBinary = b;}
+
+    public static boolean outputGraphML() {return outputGraphML;}
+    public static void setOutputGraphML(boolean b) {outputGraphML = b;}
+    public static String getVersion() { return version; }
 }
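The new fields and setters above refer to a HeapType enum that is not included in this diff; from the two setters it presumably has at least the following shape (an assumption, shown only so the hunk reads on its own).

```java
// Assumed shape of the enum referenced by priorityQueueHeapType; only the two
// constants used by setPairingHeap()/setFibonacciHeap() are certain from this diff.
public enum HeapType {
    PAIRING,
    FIBONACCI
}
```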
CellFileReader.java
@@ -12,7 +12,8 @@ import java.util.List;
 public class CellFileReader {
 
     private String filename;
-    private List<Integer[]> distinctCells = new ArrayList<>();
+    private List<String[]> distinctCells = new ArrayList<>();
+    private Integer cdr1Freq;
 
     public CellFileReader(String filename) {
         if(!filename.matches(".*\\.csv")){
@@ -31,26 +32,34 @@ public class CellFileReader {
             CSVParser parser = new CSVParser(reader, cellFileFormat);
         ){
             for(CSVRecord record: parser.getRecords()) {
-                Integer[] cell = new Integer[4];
-                cell[0] = Integer.valueOf(record.get("Alpha CDR3"));
-                cell[1] = Integer.valueOf(record.get("Beta CDR3"));
-                cell[2] = Integer.valueOf(record.get("Alpha CDR1"));
-                cell[3] = Integer.valueOf(record.get("Beta CDR1"));
+                String[] cell = new String[4];
+                cell[0] = record.get("Alpha CDR3");
+                cell[1] = record.get("Beta CDR3");
+                cell[2] = record.get("Alpha CDR1");
+                cell[3] = record.get("Beta CDR1");
                 distinctCells.add(cell);
             }
 
 
         } catch(IOException ex){
             System.out.println("cell file " + filename + " not found.");
             System.err.println(ex);
         }
+
+        //get CDR1 frequency
+        ArrayList<String> cdr1Alphas = new ArrayList<>();
+        for (String[] cell : distinctCells) {
+            cdr1Alphas.add(cell[3]);
+        }
+        double count = cdr1Alphas.stream().distinct().count();
+        count = Math.ceil(distinctCells.size() / count);
+        cdr1Freq = (int) count;
+
+    }
+
+    public CellSample getCellSample() {
+        return new CellSample(distinctCells, cdr1Freq);
     }
 
     public String getFilename() { return filename;}
 
-    public List<Integer[]> getCells(){
-        return distinctCells;
-    }
-
-    public Integer getCellCount() {
-        return distinctCells.size();
-    }
 }
CellFileWriter.java
@@ -11,7 +11,7 @@ import java.util.List;
 public class CellFileWriter {
 
     private String[] headers = {"Alpha CDR3", "Beta CDR3", "Alpha CDR1", "Beta CDR1"};
-    List<Integer[]> cells;
+    List<String[]> cells;
     String filename;
     Integer cdr1Freq;
 
@@ -35,7 +35,7 @@ public class CellFileWriter {
             printer.printComment("Sample contains 1 unique CDR1 for every " + cdr1Freq + "unique CDR3s.");
             printer.printRecords(cells);
         } catch(IOException ex){
-            System.out.println("Could not make new file named "+filename);
+            System.out.println("Could not make new file named " + filename);
             System.err.println(ex);
         }
     }
CellSample.java
@@ -1,16 +1,51 @@
+import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
+import java.util.stream.IntStream;
+
 public class CellSample {
 
-    private List<Integer[]> cells;
+    private List<String[]> cells;
     private Integer cdr1Freq;
 
-    public CellSample(List<Integer[]> cells, Integer cdr1Freq){
+    public CellSample(Integer numDistinctCells, Integer cdr1Freq){
+        this.cdr1Freq = cdr1Freq;
+        List<Integer> numbersCDR3 = new ArrayList<>();
+        List<Integer> numbersCDR1 = new ArrayList<>();
+        Integer numDistCDR3s = 2 * numDistinctCells + 1;
+        //Assign consecutive integers for each CDR3. This ensures they are all unique.
+        IntStream.range(1, numDistCDR3s + 1).forEach(i -> numbersCDR3.add(i));
+        //After all CDR3s are assigned, start assigning consecutive integers to CDR1s
+        //There will usually be fewer integers in the CDR1 list, which will allow repeats below
+        IntStream.range(numDistCDR3s + 1, numDistCDR3s + 1 + (numDistCDR3s / cdr1Freq) + 1).forEach(i -> numbersCDR1.add(i));
+        //randomize the order of the numbers in the lists
+        Collections.shuffle(numbersCDR3);
+        Collections.shuffle(numbersCDR1);
+
+        //Each cell represented by 4 values
+        //two CDR3s, and two CDR1s. First two values are CDR3s (alpha, beta), second two are CDR1s (alpha, beta)
+        List<String[]> distinctCells = new ArrayList<>();
+        for(int i = 0; i < numbersCDR3.size() - 1; i = i + 2){
+            //Go through entire CDR3 list once, make pairs of alphas and betas
+            String tmpCDR3a = numbersCDR3.get(i).toString();
+            String tmpCDR3b = numbersCDR3.get(i+1).toString();
+            //Go through the (likely shorter) CDR1 list as many times as necessary, make pairs of alphas and betas
+            String tmpCDR1a = numbersCDR1.get(i % numbersCDR1.size()).toString();
+            String tmpCDR1b = numbersCDR1.get((i+1) % numbersCDR1.size()).toString();
+            //Make the array representing the cell
+            String[] tmp = {tmpCDR3a, tmpCDR3b, tmpCDR1a, tmpCDR1b};
+            //Add the cell to the list of distinct cells
+            distinctCells.add(tmp);
+        }
+        this.cells = distinctCells;
+    }
+
+    public CellSample(List<String[]> cells, Integer cdr1Freq){
         this.cells = cells;
         this.cdr1Freq = cdr1Freq;
     }
 
-    public List<Integer[]> getCells(){
+    public List<String[]> getCells(){
         return cells;
     }
 
@@ -18,7 +53,7 @@ public class CellSample {
         return cdr1Freq;
     }
 
-    public Integer population(){
+    public Integer getCellCount(){
         return cells.size();
     }
 
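A short usage sketch of the two CellSample constructors shown in this file's diff; the argument values and the filename are arbitrary examples, not taken from the project.

```java
import java.util.List;

public class CellSampleUsageSketch {
    public static void main(String[] args) {
        // Generate a synthetic sample of 1,000,000 distinct cells with one unique CDR1
        // for every 4 unique CDR3s (arbitrary example values).
        CellSample generated = new CellSample(1_000_000, 4);
        System.out.println(generated.getCellCount() + " cells generated");

        // Or wrap cells already read from a CSV file, as CellFileReader.getCellSample() does.
        // The ".csv" extension is appended by CellFileReader when it is missing.
        List<String[]> cellsFromFile = new CellFileReader("4MilCells").getCellSample().getCells();
        CellSample fromFile = new CellSample(cellsFromFile, 4);
        System.out.println(fromFile.getCellCount() + " cells read from file");
    }
}
```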
CommandLineInterface.java
@@ -1,5 +1,9 @@
 import org.apache.commons.cli.*;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.stream.Stream;
+
 /*
  * Class for parsing options passed to program from command line
  *
@@ -29,6 +33,12 @@ import org.apache.commons.cli.*;
  * cellfile : name of the cell sample file to use as input
  * platefile : name of the sample plate file to use as input
  * output : name of the output file
+ * graphml : output a graphml file
+ * binary : output a serialized binary object file
+ * IF SIMULATING READ DEPTH, ALL THESE ARE REQUIRED. Absence indicates not simulating read depth
+ * readdepth: number of reads per sequence
+ * readerrorprob: probability of reading a sequence incorrectly
+ * errcollisionprob: probability of two read errors being identical
  *
  * Match flags:
  * graphFile : name of graph and data file to use as input
@@ -43,242 +53,185 @@ import org.apache.commons.cli.*;
|
|||||||
public class CommandLineInterface {
|
public class CommandLineInterface {
|
||||||
|
|
||||||
public static void startCLI(String[] args) {
|
public static void startCLI(String[] args) {
|
||||||
//These command line options are a big mess
|
//Options sets for the different modes
|
||||||
//Really, I don't think command line tools are expected to work in this many different modes
|
Options mainOptions = buildMainOptions();
|
||||||
//making cells, making plates, and matching are the sort of thing that UNIX philosophy would say
|
Options cellOptions = buildCellOptions();
|
||||||
//should be three separate programs.
|
Options plateOptions = buildPlateOptions();
|
||||||
//There might be a way to do it with option parameters?
|
Options graphOptions = buildGraphOptions();
|
||||||
|
Options matchOptions = buildMatchCDR3options();
|
||||||
//main options set
|
|
||||||
Options mainOptions = new Options();
|
|
||||||
Option makeCells = Option.builder("cells")
|
|
||||||
.longOpt("make-cells")
|
|
||||||
.desc("Makes a file of distinct cells")
|
|
||||||
.build();
|
|
||||||
Option makePlate = Option.builder("plates")
|
|
||||||
.longOpt("make-plates")
|
|
||||||
.desc("Makes a sample plate file")
|
|
||||||
.build();
|
|
||||||
Option makeGraph = Option.builder("graph")
|
|
||||||
.longOpt("make-graph")
|
|
||||||
.desc("Makes a graph and data file")
|
|
||||||
.build();
|
|
||||||
Option matchCDR3 = Option.builder("match")
|
|
||||||
.longOpt("match-cdr3")
|
|
||||||
.desc("Match CDR3s. Requires a cell sample file and any number of plate files.")
|
|
||||||
.build();
|
|
||||||
OptionGroup mainGroup = new OptionGroup();
|
|
||||||
mainGroup.addOption(makeCells);
|
|
||||||
mainGroup.addOption(makePlate);
|
|
||||||
mainGroup.addOption(makeGraph);
|
|
||||||
mainGroup.addOption(matchCDR3);
|
|
||||||
mainGroup.setRequired(true);
|
|
||||||
mainOptions.addOptionGroup(mainGroup);
|
|
||||||
|
|
||||||
//Reuse clones of this for other options groups, rather than making it lots of times
|
|
||||||
Option outputFile = Option.builder("o")
|
|
||||||
.longOpt("output-file")
|
|
||||||
.hasArg()
|
|
||||||
.argName("filename")
|
|
||||||
.desc("Name of output file")
|
|
||||||
.build();
|
|
||||||
mainOptions.addOption(outputFile);
|
|
||||||
|
|
||||||
//Options cellOptions = new Options();
|
|
||||||
Option numCells = Option.builder("nc")
|
|
||||||
.longOpt("num-cells")
|
|
||||||
.desc("The number of distinct cells to generate")
|
|
||||||
.hasArg()
|
|
||||||
.argName("number")
|
|
||||||
.build();
|
|
||||||
        // (pre-v4.0 flags, all registered on the single flat mainOptions object; v4.0 moves these into the
        //  per-mode build*Options() helpers added further down)
        mainOptions.addOption(numCells);
        Option cdr1Freq = Option.builder("d").longOpt("peptide-diversity-factor").hasArg().argName("number")
                .desc("Number of distinct CDR3s for every CDR1").build();
        mainOptions.addOption(cdr1Freq);
        //Option cellOutput = (Option) outputFile.clone();
        //cellOutput.setRequired(true);
        //mainOptions.addOption(cellOutput);

        //Options plateOptions = new Options();
        Option inputCells = Option.builder("c").longOpt("cell-file").hasArg().argName("file")
                .desc("The cell sample file used for filling wells").build();
        mainOptions.addOption(inputCells);
        Option numWells = Option.builder("w").longOpt("num-wells").hasArg().argName("number")
                .desc("The number of wells on each plate").build();
        mainOptions.addOption(numWells);
        Option numPlates = Option.builder("np").longOpt("num-plates").hasArg().argName("number")
                .desc("The number of plate files to output").build();
        mainOptions.addOption(numPlates);
        //Option plateOutput = (Option) outputFile.clone();
        //plateOutput.setRequired(true);
        //plateOutput.setDescription("Prefix for plate output filenames");
        //mainOptions.addOption(plateOutput);
        Option plateErr = Option.builder("err").longOpt("drop-out-rate").hasArg().argName("number")
                .desc("Well drop-out rate. (Probability between 0 and 1)").build();
        mainOptions.addOption(plateErr);
        Option plateConcentrations = Option.builder("t").longOpt("t-cells-per-well").hasArgs()
                .argName("number 1, number 2, ...").desc("Number of T cells per well for each plate section").build();
        mainOptions.addOption(plateConcentrations);

        //different distributions, mutually exclusive
        OptionGroup plateDistributions = new OptionGroup();
        Option plateExp = Option.builder("exponential")
                .desc("Sample from distinct cells with exponential frequency distribution").build();
        plateDistributions.addOption(plateExp);
        Option plateGaussian = Option.builder("gaussian")
                .desc("Sample from distinct cells with gaussian frequency distribution").build();
        plateDistributions.addOption(plateGaussian);
        Option platePoisson = Option.builder("poisson")
                .desc("Sample from distinct cells with poisson frequency distribution").build();
        plateDistributions.addOption(platePoisson);
        mainOptions.addOptionGroup(plateDistributions);

        Option plateStdDev = Option.builder("stddev").desc("Standard deviation for gaussian distribution")
                .hasArg().argName("number").build();
        mainOptions.addOption(plateStdDev);

        Option plateLambda = Option.builder("lambda").desc("Lambda for exponential distribution")
                .hasArg().argName("number").build();
        mainOptions.addOption(plateLambda);

        // String cellFile, String filename, Double stdDev,
        // Integer numWells, Integer numSections,
        // Integer[] concentrations, Double dropOutRate

        //Options matchOptions = new Options();
        inputCells.setDescription("The cell sample file to be used for matching.");
        mainOptions.addOption(inputCells);
        Option lowThresh = Option.builder("low").longOpt("low-threshold").hasArg().argName("number")
                .desc("Sets the minimum occupancy overlap to attempt matching").build();
        mainOptions.addOption(lowThresh);
        Option highThresh = Option.builder("high").longOpt("high-threshold").hasArg().argName("number")
                .desc("Sets the maximum occupancy overlap to attempt matching").build();
        mainOptions.addOption(highThresh);
        Option occDiff = Option.builder("occdiff").longOpt("occupancy-difference").hasArg().argName("number")
                .desc("Maximum difference in alpha/beta occupancy to attempt matching").build();
        mainOptions.addOption(occDiff);
        Option overlapPer = Option.builder("ovper").longOpt("overlap-percent").hasArg().argName("percent")
                .desc("Minimum overlap percent to attempt matching (0 - 100)").build();
        mainOptions.addOption(overlapPer);
        Option inputPlates = Option.builder("p").longOpt("plate-files").hasArgs()
                .desc("Plate files to match").build();
        mainOptions.addOption(inputPlates);
        CommandLineParser parser = new DefaultParser();
        try {
            // v4.0: only the first token is parsed against mainOptions to pick a mode; the remaining arguments
            // are re-parsed below against that mode's own Options object. (The old flow parsed everything against
            // mainOptions and branched on -match, -cells and -plates, looping over makeCells/makePlate*/matchCDR3s.)
            CommandLine line = parser.parse(mainOptions, Arrays.copyOfRange(args, 0, 1));
            if (line.hasOption("help")) {
                HelpFormatter formatter = new HelpFormatter();
                formatter.printHelp("BiGpairSEQ_Sim.jar", mainOptions);
                System.out.println();
                formatter.printHelp("BiGpairSEQ_Sim.jar -cells", cellOptions);
                System.out.println();
                formatter.printHelp("BiGpairSEQ_Sim.jar -plate", plateOptions);
                System.out.println();
                formatter.printHelp("BiGpairSEQ_Sim.jar -graph", graphOptions);
                System.out.println();
                formatter.printHelp("BiGpairSEQ_Sim.jar -match", matchOptions);
            }
            else if (line.hasOption("version")) {
                System.out.println("BiGpairSEQ_Sim " + BiGpairSEQ.getVersion());
            }
            else if (line.hasOption("cells")) {
                line = parser.parse(cellOptions, Arrays.copyOfRange(args, 1, args.length));
                Integer number = Integer.valueOf(line.getOptionValue("n"));
                Integer diversity = Integer.valueOf(line.getOptionValue("d"));
                String filename = line.getOptionValue("o");
                makeCells(filename, number, diversity);
            }
            else if (line.hasOption("plate")) {
                line = parser.parse(plateOptions, Arrays.copyOfRange(args, 1, args.length));
                //get the cells
                String cellFilename = line.getOptionValue("c");
                CellSample cells = getCells(cellFilename);
                //get the rest of the parameters
                Integer[] populations;
                String outputFilename = line.getOptionValue("o");
                Integer numWells = Integer.parseInt(line.getOptionValue("w"));
                Double dropoutRate = Double.parseDouble(line.getOptionValue("err"));
                if (line.hasOption("random")) {
                    //Array holding values of minimum and maximum populations
                    Integer[] min_max = Stream.of(line.getOptionValues("random"))
                            .mapToInt(Integer::parseInt).boxed().toArray(Integer[]::new);
                    populations = BiGpairSEQ.getRand().ints(min_max[0], min_max[1] + 1)
                            .limit(numWells).boxed().toArray(Integer[]::new);
                }
                else if (line.hasOption("pop")) {
                    populations = Stream.of(line.getOptionValues("pop"))
                            .mapToInt(Integer::parseInt).boxed().toArray(Integer[]::new);
                }
                else {
                    populations = new Integer[1];
                    populations[0] = 1;
                }
                //make the plate
                Plate plate;
                if (line.hasOption("poisson")) {
                    Double stdDev = Math.sqrt(numWells);
                    plate = new Plate(cells, cellFilename, numWells, populations, dropoutRate, stdDev, false);
                }
                else if (line.hasOption("gaussian")) {
                    Double stdDev = Double.parseDouble(line.getOptionValue("stddev"));
                    plate = new Plate(cells, cellFilename, numWells, populations, dropoutRate, stdDev, false);
                }
                else {
                    assert line.hasOption("exponential");
                    Double lambda = Double.parseDouble(line.getOptionValue("lambda"));
                    plate = new Plate(cells, cellFilename, numWells, populations, dropoutRate, lambda, true);
                }
                PlateFileWriter writer = new PlateFileWriter(outputFilename, plate);
                writer.writePlateFile();
            }
            else if (line.hasOption("graph")) { //Making a graph
                line = parser.parse(graphOptions, Arrays.copyOfRange(args, 1, args.length));
                String cellFilename = line.getOptionValue("c");
                String plateFilename = line.getOptionValue("p");
                String outputFilename = line.getOptionValue("o");
                //get cells and plate
                CellSample cells = getCells(cellFilename);
                Plate plate = getPlate(plateFilename);
                GraphWithMapData graph;
                Integer readDepth = 1;
                Double readErrorRate = 0.0;
                Double errorCollisionRate = 0.0;
                if (line.hasOption("rd")) { readDepth = Integer.parseInt(line.getOptionValue("rd")); }
                if (line.hasOption("err")) { readErrorRate = Double.parseDouble(line.getOptionValue("err")); }
                if (line.hasOption("coll")) { errorCollisionRate = Double.parseDouble(line.getOptionValue("coll")); }
                graph = Simulator.makeCDR3Graph(cells, plate, readDepth, readErrorRate, errorCollisionRate, false);
                if (!line.hasOption("no-binary")) { //output binary file unless told not to
                    GraphDataObjectWriter writer = new GraphDataObjectWriter(outputFilename, graph, false);
                    writer.writeDataToFile();
                }
                if (line.hasOption("graphml")) { //if told to, output graphml file
                    GraphMLFileWriter gmlwriter = new GraphMLFileWriter(outputFilename, graph);
                    gmlwriter.writeGraphToFile();
                }
            }
            else if (line.hasOption("match")) { //can add a flag for which match type in future, split this in two
                line = parser.parse(matchOptions, Arrays.copyOfRange(args, 1, args.length));
                String graphFilename = line.getOptionValue("g");
                String outputFilename;
                if (line.hasOption("o")) { outputFilename = line.getOptionValue("o"); }
                else { outputFilename = null; }
                Integer minThreshold = Integer.parseInt(line.getOptionValue("min"));
                Integer maxThreshold = Integer.parseInt(line.getOptionValue("max"));
                int minOverlapPct;
                if (line.hasOption("minpct")) { minOverlapPct = Integer.parseInt(line.getOptionValue("minpct")); } //optional filter
                else { minOverlapPct = 0; }
                int maxOccupancyDiff;
                if (line.hasOption("maxdiff")) { maxOccupancyDiff = Integer.parseInt(line.getOptionValue("maxdiff")); } //optional filter
                else { maxOccupancyDiff = Integer.MAX_VALUE; }
                GraphWithMapData graph = getGraph(graphFilename);
                MatchingResult result = Simulator.matchCDR3s(graph, graphFilename, minThreshold, maxThreshold,
                        maxOccupancyDiff, minOverlapPct, false);
                if (outputFilename != null) {
                    MatchingFileWriter writer = new MatchingFileWriter(outputFilename, result);
                    writer.writeResultsToFile();
                }
                //print the requested pieces of the MatchingResult to System.out
                if (line.hasOption("print-metadata")) {
                    for (String k : result.getMetadata().keySet()) {
                        System.out.println(k + ": " + result.getMetadata().get(k));
                    }
                }
                if (line.hasOption("print-error")) { System.out.println("pairing error rate: " + result.getPairingErrorRate()); }
                if (line.hasOption("print-attempt")) { System.out.println("pairing attempt rate: " + result.getPairingAttemptRate()); }
                if (line.hasOption("print-correct")) { System.out.println("correct pairings: " + result.getCorrectPairingCount()); }
                if (line.hasOption("print-incorrect")) { System.out.println("incorrect pairings: " + result.getIncorrectPairingCount()); }
                if (line.hasOption("print-alphas")) { System.out.println("total alphas found: " + result.getAlphaCount()); }
                if (line.hasOption("print-betas")) { System.out.println("total betas found: " + result.getBetaCount()); }
                if (line.hasOption("print-time")) { System.out.println("simulation time (seconds): " + result.getSimulationTime()); }
            }
        }
        catch (ParseException exp) {
@@ -286,43 +239,299 @@ public class CommandLineInterface {
        }
    }

    private static Option outputFileOption() {
        Option outputFile = Option.builder("o").longOpt("output-file").hasArg().argName("filename")
                .desc("Name of output file").required().build();
        return outputFile;
    }

    private static Options buildMainOptions() {
        Options mainOptions = new Options();
        Option help = Option.builder("help").desc("Displays this help menu").build();
        Option makeCells = Option.builder("cells").longOpt("make-cells")
                .desc("Makes a cell sample file of distinct T cells").build();
        Option makePlate = Option.builder("plate").longOpt("make-plate")
                .desc("Makes a sample plate file. Requires a cell sample file.").build();
        Option makeGraph = Option.builder("graph").longOpt("make-graph")
                .desc("Makes a graph/data file. Requires a cell sample file and a sample plate file").build();
        Option matchCDR3 = Option.builder("match").longOpt("match-cdr3")
                .desc("Matches CDR3s. Requires a graph/data file.").build();
        Option printVersion = Option.builder("version")
                .desc("Prints the program version number to stdout").build();
        OptionGroup mainGroup = new OptionGroup();
        mainGroup.addOption(help);
        mainGroup.addOption(printVersion);
        mainGroup.addOption(makeCells);
        mainGroup.addOption(makePlate);
        mainGroup.addOption(makeGraph);
        mainGroup.addOption(matchCDR3);
        mainGroup.setRequired(true);
        mainOptions.addOptionGroup(mainGroup);
        return mainOptions;
    }

    private static Options buildCellOptions() {
        Options cellOptions = new Options();
        Option numCells = Option.builder("n").longOpt("num-cells")
                .desc("The number of distinct cells to generate").hasArg().argName("number").required().build();
        Option cdr3Diversity = Option.builder("d").longOpt("diversity-factor")
                .desc("The factor by which unique CDR3s outnumber unique CDR1s").hasArg().argName("factor").required().build();
        cellOptions.addOption(numCells);
        cellOptions.addOption(cdr3Diversity);
        cellOptions.addOption(outputFileOption());
        return cellOptions;
    }

    private static Options buildPlateOptions() {
        Options plateOptions = new Options();
        Option cellFile = Option.builder("c").longOpt("cell-file")
                .desc("The cell sample file to use").hasArg().argName("filename").required().build();
        Option numWells = Option.builder("w").longOpt("wells")
                .desc("The number of wells on the sample plate").hasArg().argName("number").required().build();
        //option group for choosing which distribution to use
        OptionGroup distributions = new OptionGroup();
        distributions.setRequired(true);
        Option poisson = Option.builder("poisson").desc("Use a Poisson distribution for cell sample").build();
        Option gaussian = Option.builder("gaussian").desc("Use a Gaussian distribution for cell sample").build();
        Option exponential = Option.builder("exponential").desc("Use an exponential distribution for cell sample").build();
        distributions.addOption(poisson);
        distributions.addOption(gaussian);
        distributions.addOption(exponential);
        //option group for statistical distribution parameters
        OptionGroup statParams = new OptionGroup();
        Option stdDev = Option.builder("stddev")
                .desc("If using -gaussian flag, standard deviation for distribution").hasArg().argName("value").build();
        Option lambda = Option.builder("lambda")
                .desc("If using -exponential flag, lambda value for distribution").hasArg().argName("value").build();
        statParams.addOption(stdDev);
        statParams.addOption(lambda);
        //option group for random plate or explicitly set populations
        OptionGroup wellPopOptions = new OptionGroup();
        wellPopOptions.setRequired(true);
        Option randomWellPopulations = Option.builder("random")
                .desc("Randomize well populations on sample plate. Takes two arguments: the minimum possible population and the maximum possible population.")
                .hasArgs().numberOfArgs(2).argName("min> <max").build();
        Option specificWellPopulations = Option.builder("pop")
                .desc("The well populations for each section of the sample plate. There will be as many sections as there are populations given.")
                .hasArgs().argName("number [number]...").build();
        Option dropoutRate = Option.builder("err")
                .desc("The sequence dropout rate due to amplification error. (0.0 - 1.0)")
                .hasArg().argName("rate").required().build();
        wellPopOptions.addOption(randomWellPopulations);
        wellPopOptions.addOption(specificWellPopulations);
        plateOptions.addOption(cellFile);
        plateOptions.addOption(numWells);
        plateOptions.addOptionGroup(distributions);
        plateOptions.addOptionGroup(statParams);
        plateOptions.addOptionGroup(wellPopOptions);
        plateOptions.addOption(dropoutRate);
        plateOptions.addOption(outputFileOption());
        return plateOptions;
    }

    private static Options buildGraphOptions() {
        Options graphOptions = new Options();
        Option cellFilename = Option.builder("c").longOpt("cell-file")
                .desc("Cell sample file to use for checking pairing accuracy").hasArg().argName("filename").required().build();
        Option plateFilename = Option.builder("p").longOpt("plate-filename")
                .desc("Sample plate file from which to construct graph").hasArg().argName("filename").required().build();
        Option outputGraphML = Option.builder("graphml").desc("(Optional) Output GraphML file").build();
        Option outputSerializedBinary = Option.builder("nb").longOpt("no-binary")
                .desc("(Optional) Don't output serialized binary file").build();
        Option readDepth = Option.builder("rd").longOpt("read-depth")
                .desc("(Optional) The number of times to read each sequence.").hasArg().argName("depth").build();
        Option readErrorProb = Option.builder("err").longOpt("read-error-prob")
                .desc("(Optional) The probability that a sequence will be misread. (0.0 - 1.0)").hasArg().argName("prob").build();
        Option errorCollisionProb = Option.builder("coll").longOpt("error-collision-prob")
                .desc("(Optional) The probability that two misreads will produce the same spurious sequence. (0.0 - 1.0)")
                .hasArg().argName("prob").build();
        graphOptions.addOption(cellFilename);
        graphOptions.addOption(plateFilename);
        graphOptions.addOption(outputFileOption());
        graphOptions.addOption(outputGraphML);
        graphOptions.addOption(outputSerializedBinary);
        graphOptions.addOption(readDepth);
        graphOptions.addOption(readErrorProb);
        graphOptions.addOption(errorCollisionProb);
        return graphOptions;
    }

    private static Options buildMatchCDR3options() {
        Options matchCDR3options = new Options();
        Option graphFilename = Option.builder("g").longOpt("graph-file")
                .desc("The graph/data file to use").hasArg().argName("filename").required().build();
        Option minOccupancyOverlap = Option.builder("min")
                .desc("The minimum number of shared wells to attempt to match a sequence pair")
                .hasArg().argName("number").required().build();
        Option maxOccupancyOverlap = Option.builder("max")
                .desc("The maximum number of shared wells to attempt to match a sequence pair")
                .hasArg().argName("number").required().build();
        Option minOverlapPercent = Option.builder("minpct")
                .desc("(Optional) The minimum percentage of a sequence's total occupancy shared by another sequence to attempt matching. (0 - 100)")
                .hasArg().argName("percent").build();
        Option maxOccupancyDifference = Option.builder("maxdiff")
                .desc("(Optional) The maximum difference in total occupancy between two sequences to attempt matching.")
                .hasArg().argName("number").build();
        Option outputFile = Option.builder("o") //can't call the method this time, because this one's optional
                .longOpt("output-file").hasArg().argName("filename")
                .desc("(Optional) Name of the output file. If not present, no file will be written.").build();
        matchCDR3options.addOption(graphFilename)
                .addOption(minOccupancyOverlap)
                .addOption(maxOccupancyOverlap)
                .addOption(minOverlapPercent)
                .addOption(maxOccupancyDifference)
                .addOption(outputFile);

        //options for output to System.out
        Option printAlphaCount = Option.builder().longOpt("print-alphas")
                .desc("(Optional) Print the number of distinct alpha sequences to stdout.").build();
        Option printBetaCount = Option.builder().longOpt("print-betas")
                .desc("(Optional) Print the number of distinct beta sequences to stdout.").build();
        Option printTime = Option.builder().longOpt("print-time")
                .desc("(Optional) Print the total simulation time to stdout.").build();
        Option printErrorRate = Option.builder().longOpt("print-error")
                .desc("(Optional) Print the pairing error rate to stdout").build();
        Option printAttempt = Option.builder().longOpt("print-attempt")
                .desc("(Optional) Print the pairing attempt rate to stdout").build();
        Option printCorrect = Option.builder().longOpt("print-correct")
                .desc("(Optional) Print the number of correct pairs to stdout").build();
        Option printIncorrect = Option.builder().longOpt("print-incorrect")
                .desc("(Optional) Print the number of incorrect pairs to stdout").build();
        Option printMetadata = Option.builder().longOpt("print-metadata")
                .desc("(Optional) Print a full summary of the matching results to stdout.").build();
        matchCDR3options
                .addOption(printErrorRate)
                .addOption(printAttempt)
                .addOption(printCorrect)
                .addOption(printIncorrect)
                .addOption(printMetadata)
                .addOption(printAlphaCount)
                .addOption(printBetaCount)
                .addOption(printTime);
        return matchCDR3options;
    }

    private static CellSample getCells(String cellFilename) {
        assert cellFilename != null;
        CellFileReader reader = new CellFileReader(cellFilename);
        return reader.getCellSample();
    }

    private static Plate getPlate(String plateFilename) {
        assert plateFilename != null;
        PlateFileReader reader = new PlateFileReader(plateFilename);
        return reader.getSamplePlate();
    }

    private static GraphWithMapData getGraph(String graphFilename) {
        assert graphFilename != null;
        try {
            GraphDataObjectReader reader = new GraphDataObjectReader(graphFilename, false);
            return reader.getData();
        }
        catch (IOException ex) {
            ex.printStackTrace();
            return null;
        }
    }

    //for calling from command line
    public static void makeCells(String filename, Integer numCells, Integer cdr1Freq) {
        CellSample sample = new CellSample(numCells, cdr1Freq);   // (was: Simulator.generateCellSample(numCells, cdr1Freq))
        CellFileWriter writer = new CellFileWriter(filename, sample);
        writer.writeCellsToFile();
    }

    // (removed in v4.0: the old makePlateExp, makePlatePoisson and makePlate helpers, which read the cell file,
    //  built a Plate(numWells, dropOutRate, concentrations), filled its wells with the requested distribution and
    //  wrote it out with PlateFileWriter, plus an empty matchCDR3s(graphFile, lowThreshold, highThreshold,
    //  occupancyDifference, overlapPercent) stub)
}
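For orientation, a typical v4.0 session built from the modes and flags above might look like the following; the jar name matches the help text printed by the code, but the file names and numeric values are placeholders, not values taken from the source:

    java -jar BiGpairSEQ_Sim.jar -cells -n 10000 -d 10 -o cells
    java -jar BiGpairSEQ_Sim.jar -plate -c cells -w 96 -poisson -pop 500 1000 2000 -err 0.1 -o plate1
    java -jar BiGpairSEQ_Sim.jar -graph -c cells -p plate1 -o run1 -rd 3 -err 0.01 -coll 0.001 -graphml
    java -jar BiGpairSEQ_Sim.jar -match -g run1 -min 3 -max 40 -minpct 75 -maxdiff 10 -o matches --print-error --print-correct --print-time

Because each mode re-parses everything after the first token against its own Options object, per-mode flag names only need to be unique within that mode.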
|||||||
@@ -4,10 +4,6 @@ import java.math.MathContext;

public abstract class Equations {

    // (removed in v4.0: a getRandomNumber(int min, int max) helper that returned
    //  (int) ((Math.random() * (max - min)) + min) )

    //pValue calculation as described in original pairSEQ paper.
    //Included for comparison with original results.
    //Not used by BiGpairSEQ for matching.
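For intuition about the statistic those comments refer to — this is the generic hypergeometric overlap model, not necessarily the exact expression kept in Equations — if an alpha chain occupies w_a of W wells and a beta chain occupies w_b, the probability that two unrelated chains share at least k wells by chance is

    P(X >= k) = sum over i = k .. min(w_a, w_b) of  C(w_b, i) * C(W - w_b, w_a - i) / C(W, w_a)

where C(n, r) is the binomial coefficient. As the comments note, BiGpairSEQ's matching never evaluates this tail probability; it filters on raw overlap counts and percentages instead.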
|||||||
@@ -1,10 +1,12 @@
import java.io.*;

public class GraphDataObjectReader {

    private GraphWithMapData data;
    private String filename;

    public GraphDataObjectReader(String filename, boolean verbose) throws IOException {   // (was: GraphDataObjectReader(String filename))
        if(!filename.matches(".*\\.ser")){
            filename = filename + ".ser";
        }
@@ -13,10 +15,13 @@ public class GraphDataObjectReader {
        BufferedInputStream fileIn = new BufferedInputStream(new FileInputStream(filename));
             ObjectInputStream in = new ObjectInputStream(fileIn))
        {
            if (verbose) {
                System.out.println("Reading graph data from file. This may take some time");
                System.out.println("File I/O time is not included in results");
            }
            data = (GraphWithMapData) in.readObject();
        } catch (FileNotFoundException | ClassNotFoundException ex) {
            System.out.println("Graph/data file " + filename + " not found.");
            ex.printStackTrace();
        }
    }
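A minimal loading sketch using the constructor above; the file name is a placeholder, and the verbose flag is false so the progress messages stay suppressed:

    // Quietly load a previously serialized graph/occupancy bundle.
    try {
        GraphDataObjectReader reader = new GraphDataObjectReader("run1.ser", false);
        GraphWithMapData data = reader.getData();
    } catch (IOException ex) {
        ex.printStackTrace();
    }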
|||||||
@@ -1,3 +1,5 @@
import org.jgrapht.Graph;

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;

@@ -7,6 +9,7 @@ public class GraphDataObjectWriter {

    private GraphWithMapData data;
    private String filename;
    private boolean verbose = true;

    public GraphDataObjectWriter(String filename, GraphWithMapData data) {
        if(!filename.matches(".*\\.ser")){

@@ -16,13 +19,24 @@ public class GraphDataObjectWriter {
        this.data = data;
    }

    public GraphDataObjectWriter(String filename, GraphWithMapData data, boolean verbose) {
        this.verbose = verbose;
        if(!filename.matches(".*\\.ser")){
            filename = filename + ".ser";
        }
        this.filename = filename;
        this.data = data;
    }

    public void writeDataToFile() {
        try (BufferedOutputStream bufferedOut = new BufferedOutputStream(new FileOutputStream(filename));
             ObjectOutputStream out = new ObjectOutputStream(bufferedOut);
        ){
            if (verbose) {
                System.out.println("Writing graph and occupancy data to file. This may take some time.");
                System.out.println("File I/O time is not included in results.");
            }
            out.writeObject(data);
        } catch (IOException ex) {
            ex.printStackTrace();
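The matching write-side sketch, again with placeholder names; passing false selects the new quiet constructor so batch runs don't interleave I/O chatter with results:

    // Serialize the graph/occupancy bundle without progress messages; ".ser" is appended if missing.
    GraphDataObjectWriter writer = new GraphDataObjectWriter("run1", graph, false);
    writer.writeDataToFile();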
|||||||
@@ -1,35 +0,0 @@
(GraphMLFileReader.java is deleted in v4.0. The removed class appended ".graphml" to the filename when missing,
opened the file with Files.newBufferedReader in a try-with-resources block, imported it into the supplied
SimpleWeightedGraph using a JGraphT GraphMLImporter, printed "Graph file <name> not found." on IOException,
and exposed the result through getGraph().)
||||||
@@ -1,20 +1,38 @@
import org.jgrapht.graph.DefaultWeightedEdge;
import org.jgrapht.graph.SimpleWeightedGraph;
import org.jgrapht.nio.Attribute;
import org.jgrapht.nio.AttributeType;
import org.jgrapht.nio.DefaultAttribute;
import org.jgrapht.nio.graphml.GraphMLExporter;
import org.jgrapht.nio.graphml.GraphMLExporter.AttributeCategory;
import org.w3c.dom.Attr;

import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.HashMap;
import java.util.Map;

public class GraphMLFileWriter {

    String filename;
    SimpleWeightedGraph graph;
    GraphWithMapData data;
    Map<String, Attribute> graphAttributes;

    public GraphMLFileWriter(String filename, GraphWithMapData data) {
        if(!filename.matches(".*\\.graphml")){
            filename = filename + ".graphml";
        }
        this.filename = filename;
        this.data = data;
        this.graph = data.getGraph();
        graphAttributes = createGraphAttributes();
    }

    public GraphMLFileWriter(String filename, SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph) {
        if(!filename.matches(".*\\.graphml")){
            filename = filename + ".graphml";
        }

@@ -22,10 +40,56 @@ public class GraphMLFileWriter {
        this.graph = graph;
    }

    private Map<String, Attribute> createGraphAttributes(){
        Map<String, Attribute> ga = new HashMap<>();
        //Sample plate filename
        ga.put("sample plate filename", DefaultAttribute.createAttribute(data.getSourceFilename()));
        //Number of wells
        ga.put("well count", DefaultAttribute.createAttribute(data.getNumWells().toString()));
        //Well populations
        Integer[] wellPopulations = data.getWellPopulations();
        StringBuilder populationsStringBuilder = new StringBuilder();
        populationsStringBuilder.append(wellPopulations[0].toString());
        for(int i = 1; i < wellPopulations.length; i++){
            populationsStringBuilder.append(", ");
            populationsStringBuilder.append(wellPopulations[i].toString());
        }
        String wellPopulationsString = populationsStringBuilder.toString();
        ga.put("well populations", DefaultAttribute.createAttribute(wellPopulationsString));
        ga.put("read depth", DefaultAttribute.createAttribute(data.getReadDepth().toString()));
        ga.put("read error rate", DefaultAttribute.createAttribute(data.getReadErrorRate().toString()));
        ga.put("error collision rate", DefaultAttribute.createAttribute(data.getErrorCollisionRate().toString()));
        return ga;
    }

    public void writeGraphToFile() {
        try(BufferedWriter writer = Files.newBufferedWriter(Path.of(filename), StandardOpenOption.CREATE_NEW);
        ){
            //create exporter. Let the vertex labels be the unique ids for the vertices
            GraphMLExporter<Vertex, SimpleWeightedGraph<Vertex, DefaultWeightedEdge>> exporter = new GraphMLExporter<>(v -> v.getVertexLabel().toString());
            //set to export weights
            exporter.setExportEdgeWeights(true);
            //Set graph attributes
            exporter.setGraphAttributeProvider( () -> graphAttributes);
            //set type, sequence, occupancy and read-count attributes for each vertex
            exporter.setVertexAttributeProvider( v -> {
                Map<String, Attribute> attributes = new HashMap<>();
                attributes.put("type", DefaultAttribute.createAttribute(v.getType().name()));
                attributes.put("sequence", DefaultAttribute.createAttribute(v.getSequence()));
                attributes.put("occupancy", DefaultAttribute.createAttribute(v.getOccupancy()));
                attributes.put("read count", DefaultAttribute.createAttribute(v.getReadCount()));
                return attributes;
            });
            //register the attributes
            for(String s : graphAttributes.keySet()) {
                exporter.registerAttribute(s, AttributeCategory.GRAPH, AttributeType.STRING);
            }
            exporter.registerAttribute("type", AttributeCategory.NODE, AttributeType.STRING);
            exporter.registerAttribute("sequence", AttributeCategory.NODE, AttributeType.STRING);
            exporter.registerAttribute("occupancy", AttributeCategory.NODE, AttributeType.STRING);
            exporter.registerAttribute("read count", AttributeCategory.NODE, AttributeType.STRING);
            //export the graph
            exporter.exportGraph(graph, writer);
        } catch(IOException ex){
            System.out.println("Could not make new file named "+filename);
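A minimal export sketch using the data-aware constructor added above (the output name is a placeholder); the plate metadata ends up as graph-level GraphML attributes and each vertex carries its type, sequence, occupancy and read count:

    // Write the occupancy graph plus its plate metadata as GraphML for external tools; ".graphml" is appended if missing.
    GraphMLFileWriter gml = new GraphMLFileWriter("run1", data);
    gml.writeGraphToFile();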
|||||||
@@ -2,89 +2,137 @@ import org.jgrapht.graph.DefaultWeightedEdge;
import org.jgrapht.graph.SimpleWeightedGraph;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public interface GraphModificationFunctions {   // (was: public abstract class GraphModificationFunctions)

    //remove over- and under-weight edges, return removed edges
    static Map<Vertex[], Integer> filterByOverlapThresholds(SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph,
                                                            int low, int high, boolean saveEdges) {
        Map<Vertex[], Integer> removedEdges = new HashMap<>();
        for (DefaultWeightedEdge e : graph.edgeSet()) {
            if ((graph.getEdgeWeight(e) > high) || (graph.getEdgeWeight(e) < low)) {
                if (saveEdges) {
                    Vertex[] edge = {graph.getEdgeSource(e), graph.getEdgeTarget(e)};
                    removedEdges.put(edge, (int) graph.getEdgeWeight(e));
                } else {
                    graph.setEdgeWeight(e, 0.0);
                }
            }
        }
        if (saveEdges) {
            for (Vertex[] edge : removedEdges.keySet()) { graph.removeEdge(edge[0], edge[1]); }
        }
        return removedEdges;
    }

    //Remove edges for pairs with large occupancy discrepancy, return removed edges
    static Map<Vertex[], Integer> filterByRelativeOccupancy(SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph,
                                                            Integer maxOccupancyDifference, boolean saveEdges) {
        Map<Vertex[], Integer> removedEdges = new HashMap<>();
        for (DefaultWeightedEdge e : graph.edgeSet()) {
            Integer alphaOcc = graph.getEdgeSource(e).getOccupancy();
            Integer betaOcc = graph.getEdgeTarget(e).getOccupancy();
            if (Math.abs(alphaOcc - betaOcc) >= maxOccupancyDifference) {
                if (saveEdges) {
                    Vertex[] edge = {graph.getEdgeSource(e), graph.getEdgeTarget(e)};
                    removedEdges.put(edge, (int) graph.getEdgeWeight(e));
                } else {
                    graph.setEdgeWeight(e, 0.0);
                }
            }
        }
        if (saveEdges) {
            for (Vertex[] edge : removedEdges.keySet()) { graph.removeEdge(edge[0], edge[1]); }
        }
        return removedEdges;
    }

    //Remove edges for pairs where overlap size is significantly lower than the well occupancy, return removed edges
    static Map<Vertex[], Integer> filterByOverlapPercent(SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph,
                                                         Integer minOverlapPercent, boolean saveEdges) {
        Map<Vertex[], Integer> removedEdges = new HashMap<>();
        for (DefaultWeightedEdge e : graph.edgeSet()) {
            Integer alphaOcc = graph.getEdgeSource(e).getOccupancy();
            Integer betaOcc = graph.getEdgeTarget(e).getOccupancy();
            double weight = graph.getEdgeWeight(e);
            double min = minOverlapPercent / 100.0;
            if ((weight / alphaOcc < min) || (weight / betaOcc < min)) {
                if (saveEdges) {
                    Vertex[] edge = {graph.getEdgeSource(e), graph.getEdgeTarget(e)};
                    removedEdges.put(edge, (int) graph.getEdgeWeight(e));
                } else {
                    graph.setEdgeWeight(e, 0.0);
                }
            }
        }
        if (saveEdges) {
            for (Vertex[] edge : removedEdges.keySet()) { graph.removeEdge(edge[0], edge[1]); }
        }
        return removedEdges;
    }

    //Remove edges for pairs whose read counts differ by at least the threshold, return removed edges
    static Map<Vertex[], Integer> filterByRelativeReadCount(SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph,
                                                            Integer threshold, boolean saveEdges) {
        Map<Vertex[], Integer> removedEdges = new HashMap<>();
        for (DefaultWeightedEdge e : graph.edgeSet()) {
            Integer alphaReadCount = graph.getEdgeSource(e).getReadCount();
            Integer betaReadCount = graph.getEdgeTarget(e).getReadCount();
            if (!RelativeReadCountFilterFunction(threshold, alphaReadCount, betaReadCount)) {
                if (saveEdges) {
                    Vertex[] edge = {graph.getEdgeSource(e), graph.getEdgeTarget(e)};
                    removedEdges.put(edge, (int) graph.getEdgeWeight(e));
                } else {
                    graph.setEdgeWeight(e, 0.0);
                }
            }
        }
        if (saveEdges) {
            for (Vertex[] edge : removedEdges.keySet()) { graph.removeEdge(edge[0], edge[1]); }
        }
        return removedEdges;
    }

    static Boolean RelativeReadCountFilterFunction(Integer threshold, Integer alphaReadCount, Integer betaReadCount) {
        return Math.abs(alphaReadCount - betaReadCount) < threshold;
    }

    //re-insert previously filtered edges with their saved weights
    static void addRemovedEdges(SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph,
                                Map<Vertex[], Integer> removedEdges) {
        for (Vertex[] edge : removedEdges.keySet()) {
            DefaultWeightedEdge e = graph.addEdge(edge[0], edge[1]);
            graph.setEdgeWeight(e, removedEdges.get(edge));
        }
    }

}
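Because every filter can now either collect the edges it rejects or simply zero their weights, a matching pass can probe a threshold and then undo it. A sketch, assuming a SimpleWeightedGraph<Vertex, DefaultWeightedEdge> named graph is in scope and the thresholds are placeholders:

    // Drop edges outside a 3..40 shared-well window, but remember them and their weights.
    Map<Vertex[], Integer> dropped = GraphModificationFunctions.filterByOverlapThresholds(graph, 3, 40, true);
    // ... run matching on the reduced graph ...
    // Restore the removed edges for the next parameter sweep.
    GraphModificationFunctions.addRemovedEdges(graph, dropped);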
|||||||
@@ -6,41 +6,50 @@ import java.util.Map;
//Can't just write the graph, because I need the occupancy data too.
//Makes most sense to serialize object and write that to a file.
//Which means there's no reason to split map data and graph data up.
//Custom vertex class means a lot of the map data can now be encoded in the graph itself
public class GraphWithMapData implements java.io.Serializable {

    private String sourceFilename;
    private final SimpleWeightedGraph graph;
    private Integer numWells;
    private Integer[] wellPopulations;   // (was: wellConcentrations)
    private Integer alphaCount;
    private Integer betaCount;
    private int readDepth;
    private double readErrorRate;
    private double errorCollisionRate;
    private final Map<String, String> distCellsMapAlphaKey;   // (was: Map<Integer, Integer>)
    // (the plateVtoAMap/plateVtoBMap/plateAtoVMap/plateBtoVMap fields and the alpha/beta well-count maps are now commented out)
    private final Duration time;

    public GraphWithMapData(SimpleWeightedGraph graph, Integer numWells, Integer[] wellConcentrations,
                            Map<String, String> distCellsMapAlphaKey, Integer alphaCount, Integer betaCount,
                            Integer readDepth, Double readErrorRate, Double errorCollisionRate, Duration time){
        this.graph = graph;
        this.numWells = numWells;
        this.wellPopulations = wellConcentrations;
        this.alphaCount = alphaCount;
        this.betaCount = betaCount;
        this.distCellsMapAlphaKey = distCellsMapAlphaKey;
        this.readDepth = readDepth;
        this.readErrorRate = readErrorRate;
        this.errorCollisionRate = errorCollisionRate;
        this.time = time;
    }

@@ -52,8 +61,8 @@ public class GraphWithMapData implements java.io.Serializable {
        return numWells;
    }

    public Integer[] getWellPopulations() {   // (was: getWellConcentrations)
        return wellPopulations;
    }

    public Integer getAlphaCount() {

@@ -64,33 +73,35 @@ public class GraphWithMapData implements java.io.Serializable {
        return betaCount;
    }

    public Map<String, String> getDistCellsMapAlphaKey() {
        return distCellsMapAlphaKey;
    }

    // (the getters for the old plate-to-vertex maps and well-count maps are likewise commented out)

    public Integer getReadDepth() { return readDepth; }

    public Duration getTime() {
        return time;

@@ -103,4 +114,12 @@ public class GraphWithMapData implements java.io.Serializable {
    public String getSourceFilename() {
        return sourceFilename;
    }

    public Double getReadErrorRate() {
        return readErrorRate;
    }

    public Double getErrorCollisionRate() {
        return errorCollisionRate;
    }
}
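Since the bookkeeping that used to live in separate maps is now carried by the bundle itself (and by the Vertex objects), a results script can read the run parameters straight off a loaded GraphWithMapData; the variable name is assumed:

    // Summarize a loaded graph/data bundle.
    System.out.println("wells: " + data.getNumWells()
            + ", alphas: " + data.getAlphaCount() + ", betas: " + data.getBetaCount()
            + ", read depth: " + data.getReadDepth()
            + ", read error rate: " + data.getReadErrorRate()
            + ", collision rate: " + data.getErrorCollisionRate());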
|||||||
4
src/main/java/HeapType.java
Normal file
@@ -0,0 +1,4 @@
public enum HeapType {
    FIBONACCI,
    PAIRING
}
@@ -1,14 +1,15 @@
 import java.io.IOException;
-import java.util.List;
-import java.util.Scanner;
-import java.util.InputMismatchException;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;

 //
 public class InteractiveInterface {

-    final static Scanner sc = new Scanner(System.in);
-    static int input;
-    static boolean quit = false;
+    private static final Random rand = BiGpairSEQ.getRand();
+    private static final Scanner sc = new Scanner(System.in);
+    private static int input;
+    private static boolean quit = false;

     public static void startInteractive() {

@@ -26,6 +27,7 @@ public class InteractiveInterface {
         //Need to re-do the CDR3/CDR1 matching to correspond to new pattern
         //System.out.println("5) Generate CDR3/CDR1 occupancy graph");
         //System.out.println("6) Simulate CDR3/CDR1 T cell matching");
+        System.out.println("8) Options");
         System.out.println("9) About/Acknowledgments");
         System.out.println("0) Exit");
         try {
@@ -36,9 +38,10 @@ public class InteractiveInterface {
                 case 3 -> makeCDR3Graph();
                 case 4 -> matchCDR3s();
                 //case 6 -> matchCellsCDR1();
+                case 8 -> mainOptions();
                 case 9 -> acknowledge();
                 case 0 -> quit = true;
-                default -> throw new InputMismatchException("Invalid input.");
+                default -> System.out.println("Invalid input.");
             }
         } catch (InputMismatchException | IOException ex) {
             System.out.println(ex);
@@ -71,11 +74,15 @@ public class InteractiveInterface {
|
|||||||
System.out.println(ex);
|
System.out.println(ex);
|
||||||
sc.next();
|
sc.next();
|
||||||
}
|
}
|
||||||
CellSample sample = Simulator.generateCellSample(numCells, cdr1Freq);
|
CellSample sample = new CellSample(numCells, cdr1Freq);
|
||||||
assert filename != null;
|
assert filename != null;
|
||||||
|
System.out.println("Writing cells to file");
|
||||||
CellFileWriter writer = new CellFileWriter(filename, sample);
|
CellFileWriter writer = new CellFileWriter(filename, sample);
|
||||||
writer.writeCellsToFile();
|
writer.writeCellsToFile();
|
||||||
System.gc();
|
System.out.println("Cell sample written to: " + filename);
|
||||||
|
if(BiGpairSEQ.cacheCells()) {
|
||||||
|
BiGpairSEQ.setCellSampleInMemory(sample, filename);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
//Output a CSV of sample plate
|
//Output a CSV of sample plate
|
||||||
@@ -85,7 +92,7 @@ public class InteractiveInterface {
|
|||||||
Double stdDev = 0.0;
|
Double stdDev = 0.0;
|
||||||
Integer numWells = 0;
|
Integer numWells = 0;
|
||||||
Integer numSections;
|
Integer numSections;
|
||||||
Integer[] concentrations = {1};
|
Integer[] populations = {1};
|
||||||
Double dropOutRate = 0.0;
|
Double dropOutRate = 0.0;
|
||||||
boolean poisson = false;
|
boolean poisson = false;
|
||||||
boolean exponential = false;
|
boolean exponential = false;
|
||||||
@@ -124,10 +131,11 @@ public class InteractiveInterface {
|
|||||||
}
|
}
|
||||||
case 3 -> {
|
case 3 -> {
|
||||||
exponential = true;
|
exponential = true;
|
||||||
System.out.println("Please enter lambda value for exponential distribution.");
|
System.out.print("Please enter lambda value for exponential distribution: ");
|
||||||
lambda = sc.nextDouble();
|
lambda = sc.nextDouble();
|
||||||
if (lambda <= 0.0) {
|
if (lambda <= 0.0) {
|
||||||
throw new InputMismatchException("Value must be positive.");
|
lambda = 0.6;
|
||||||
|
System.out.println("Value must be positive. Defaulting to 0.6.");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
default -> {
|
default -> {
|
||||||
@@ -140,22 +148,57 @@ public class InteractiveInterface {
|
|||||||
if(numWells < 1){
|
if(numWells < 1){
|
||||||
throw new InputMismatchException("No wells on plate");
|
throw new InputMismatchException("No wells on plate");
|
||||||
}
|
}
|
||||||
System.out.println("\nThe plate can be evenly sectioned to allow multiple concentrations of T-cells/well");
|
//choose whether to make T cell population/well random
|
||||||
System.out.println("How many sections would you like to make (minimum 1)?");
|
boolean randomWellPopulations;
|
||||||
numSections = sc.nextInt();
|
System.out.println("Randomize number of T cells in each well? (y/n)");
|
||||||
if(numSections < 1) {
|
String ans = sc.next();
|
||||||
throw new InputMismatchException("Too few sections.");
|
Pattern pattern = Pattern.compile("(?:yes|y)", Pattern.CASE_INSENSITIVE);
|
||||||
|
Matcher matcher = pattern.matcher(ans);
|
||||||
|
if(matcher.matches()){
|
||||||
|
randomWellPopulations = true;
|
||||||
}
|
}
|
||||||
else if (numSections > numWells) {
|
else{
|
||||||
throw new InputMismatchException("Cannot have more sections than wells.");
|
randomWellPopulations = false;
|
||||||
}
|
}
|
||||||
int i = 1;
|
if(randomWellPopulations) { //if T cell population/well is random
|
||||||
concentrations = new Integer[numSections];
|
numSections = numWells;
|
||||||
while(numSections > 0) {
|
Integer minPop;
|
||||||
System.out.print("Enter number of T-cells per well in section " + i +": ");
|
Integer maxPop;
|
||||||
concentrations[i - 1] = sc.nextInt();
|
System.out.print("Please enter minimum number of T cells in a well: ");
|
||||||
i++;
|
minPop = sc.nextInt();
|
||||||
numSections--;
|
if(minPop < 1) {
|
||||||
|
throw new InputMismatchException("Minimum well population must be positive");
|
||||||
|
}
|
||||||
|
System.out.println("Please enter maximum number of T cells in a well: ");
|
||||||
|
maxPop = sc.nextInt();
|
||||||
|
if(maxPop < minPop) {
|
||||||
|
throw new InputMismatchException("Max well population must be greater than min well population");
|
||||||
|
}
|
||||||
|
//maximum should be inclusive, so need to add one to max of randomly generated values
|
||||||
|
populations = rand.ints(minPop, maxPop + 1)
|
||||||
|
.limit(numSections)
|
||||||
|
.boxed()
|
||||||
|
.toArray(Integer[]::new);
|
||||||
|
System.out.print("Populations: ");
|
||||||
|
System.out.println(Arrays.toString(populations));
|
||||||
|
}
|
||||||
|
else{ //if T cell population/well is not random
|
||||||
|
System.out.println("\nThe plate can be evenly sectioned to allow different numbers of T cells per well.");
|
||||||
|
System.out.println("How many sections would you like to make (minimum 1)?");
|
||||||
|
numSections = sc.nextInt();
|
||||||
|
if (numSections < 1) {
|
||||||
|
throw new InputMismatchException("Too few sections.");
|
||||||
|
} else if (numSections > numWells) {
|
||||||
|
throw new InputMismatchException("Cannot have more sections than wells.");
|
||||||
|
}
|
||||||
|
int i = 1;
|
||||||
|
populations = new Integer[numSections];
|
||||||
|
while (numSections > 0) {
|
||||||
|
System.out.print("Enter number of T cells per well in section " + i + ": ");
|
||||||
|
populations[i - 1] = sc.nextInt();
|
||||||
|
i++;
|
||||||
|
numSections--;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
System.out.println("\nErrors in amplification can induce a well dropout rate for sequences");
|
System.out.println("\nErrors in amplification can induce a well dropout rate for sequences");
|
||||||
System.out.print("Enter well dropout rate (0.0 to 1.0): ");
|
System.out.print("Enter well dropout rate (0.0 to 1.0): ");
|
||||||
@@ -167,27 +210,38 @@ public class InteractiveInterface {
|
|||||||
System.out.println(ex);
|
System.out.println(ex);
|
||||||
sc.next();
|
sc.next();
|
||||||
}
|
}
|
||||||
System.out.println("Reading Cell Sample file: " + cellFile);
|
|
||||||
assert cellFile != null;
|
assert cellFile != null;
|
||||||
CellFileReader cellReader = new CellFileReader(cellFile);
|
CellSample cells;
|
||||||
|
if (cellFile.equals(BiGpairSEQ.getCellFilename())){
|
||||||
|
cells = BiGpairSEQ.getCellSampleInMemory();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
System.out.println("Reading Cell Sample file: " + cellFile);
|
||||||
|
CellFileReader cellReader = new CellFileReader(cellFile);
|
||||||
|
cells = cellReader.getCellSample();
|
||||||
|
if(BiGpairSEQ.cacheCells()) {
|
||||||
|
BiGpairSEQ.setCellSampleInMemory(cells, cellFile);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assert filename != null;
|
||||||
|
Plate samplePlate;
|
||||||
|
PlateFileWriter writer;
|
||||||
if(exponential){
|
if(exponential){
|
||||||
Plate samplePlate = new Plate(numWells, dropOutRate, concentrations);
|
samplePlate = new Plate(cells, cellFile, numWells, populations, dropOutRate, lambda, true);
|
||||||
samplePlate.fillWellsExponential(cellReader.getFilename(), cellReader.getCells(), lambda);
|
writer = new PlateFileWriter(filename, samplePlate);
|
||||||
PlateFileWriter writer = new PlateFileWriter(filename, samplePlate);
|
|
||||||
writer.writePlateFile();
|
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
if (poisson) {
|
if (poisson) {
|
||||||
stdDev = Math.sqrt(cellReader.getCellCount()); //gaussian with square root of elements approximates poisson
|
stdDev = Math.sqrt(cells.getCellCount()); //gaussian with square root of elements approximates poisson
|
||||||
}
|
}
|
||||||
Plate samplePlate = new Plate(numWells, dropOutRate, concentrations);
|
samplePlate = new Plate(cells, cellFile, numWells, populations, dropOutRate, stdDev, false);
|
||||||
samplePlate.fillWells(cellReader.getFilename(), cellReader.getCells(), stdDev);
|
writer = new PlateFileWriter(filename, samplePlate);
|
||||||
assert filename != null;
|
}
|
||||||
PlateFileWriter writer = new PlateFileWriter(filename, samplePlate);
|
System.out.println("Writing Sample Plate to file");
|
||||||
System.out.println("Writing Sample Plate to file");
|
writer.writePlateFile();
|
||||||
writer.writePlateFile();
|
System.out.println("Sample Plate written to file: " + filename);
|
||||||
System.out.println("Sample Plate written to file: " + filename);
|
if(BiGpairSEQ.cachePlate()) {
|
||||||
System.gc();
|
BiGpairSEQ.setPlateInMemory(samplePlate, filename);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -196,7 +250,11 @@ public class InteractiveInterface {
|
|||||||
String filename = null;
|
String filename = null;
|
||||||
String cellFile = null;
|
String cellFile = null;
|
||||||
String plateFile = null;
|
String plateFile = null;
|
||||||
|
Boolean simulateReadDepth = false;
|
||||||
|
//number of times to read each sequence in a well
|
||||||
|
int readDepth = 1;
|
||||||
|
double readErrorRate = 0.0;
|
||||||
|
double errorCollisionRate = 0.0;
|
||||||
try {
|
try {
|
||||||
String str = "\nGenerating bipartite weighted graph encoding occupancy overlap data ";
|
String str = "\nGenerating bipartite weighted graph encoding occupancy overlap data ";
|
||||||
str = str.concat("\nrequires a cell sample file and a sample plate file.");
|
str = str.concat("\nrequires a cell sample file and a sample plate file.");
|
||||||
@@ -205,21 +263,73 @@ public class InteractiveInterface {
|
|||||||
cellFile = sc.next();
|
cellFile = sc.next();
|
||||||
System.out.print("\nPlease enter name of an existing sample plate file: ");
|
System.out.print("\nPlease enter name of an existing sample plate file: ");
|
||||||
plateFile = sc.next();
|
plateFile = sc.next();
|
||||||
System.out.println("\nThe graph and occupancy data will be written to a serialized binary file.");
|
System.out.println("\nEnable simulation of sequence read depth and sequence read errors? (y/n)");
|
||||||
|
System.out.println("NOTE: sample plate data cannot be cached when simulating read errors");
|
||||||
|
String ans = sc.next();
|
||||||
|
Pattern pattern = Pattern.compile("(?:yes|y)", Pattern.CASE_INSENSITIVE);
|
||||||
|
Matcher matcher = pattern.matcher(ans);
|
||||||
|
if(matcher.matches()){
|
||||||
|
simulateReadDepth = true;
|
||||||
|
}
|
||||||
|
if (simulateReadDepth) {
|
||||||
|
BiGpairSEQ.setCachePlate(false);
|
||||||
|
BiGpairSEQ.clearPlateInMemory();
|
||||||
|
System.out.print("\nPlease enter read depth (the integer number of reads per sequence): ");
|
||||||
|
readDepth = sc.nextInt();
|
||||||
|
if(readDepth < 1) {
|
||||||
|
throw new InputMismatchException("The read depth must be an integer >= 1");
|
||||||
|
}
|
||||||
|
System.out.print("\nPlease enter probability of a sequence read error (0.0 to 1.0): ");
|
||||||
|
readErrorRate = sc.nextDouble();
|
||||||
|
if(readErrorRate < 0.0 || readErrorRate > 1.0) {
|
||||||
|
throw new InputMismatchException("The read error rate must be in the range [0.0, 1.0]");
|
||||||
|
}
|
||||||
|
System.out.println("\nPlease enter the probability of read error collision");
|
||||||
|
System.out.println("(the likelihood that two read errors produce the same spurious sequence)");
|
||||||
|
System.out.print("(0.0 to 1.0): ");
|
||||||
|
errorCollisionRate = sc.nextDouble();
|
||||||
|
if(errorCollisionRate < 0.0 || errorCollisionRate > 1.0) {
|
||||||
|
throw new InputMismatchException("The error collision probability must be an in the range [0.0, 1.0]");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
System.out.println("\nThe graph and occupancy data will be written to a file.");
|
||||||
System.out.print("Please enter a name for the output file: ");
|
System.out.print("Please enter a name for the output file: ");
|
||||||
filename = sc.next();
|
filename = sc.next();
|
||||||
} catch (InputMismatchException ex) {
|
} catch (InputMismatchException ex) {
|
||||||
System.out.println(ex);
|
System.out.println(ex);
|
||||||
sc.next();
|
sc.next();
|
||||||
}
|
}
|
||||||
System.out.println("Reading Cell Sample file: " + cellFile);
|
|
||||||
assert cellFile != null;
|
assert cellFile != null;
|
||||||
CellFileReader cellReader = new CellFileReader(cellFile);
|
CellSample cellSample;
|
||||||
System.out.println("Reading Sample Plate file: " + plateFile);
|
//check if cells are already in memory
|
||||||
|
if(cellFile.equals(BiGpairSEQ.getCellFilename()) && BiGpairSEQ.getCellSampleInMemory() != null) {
|
||||||
|
cellSample = BiGpairSEQ.getCellSampleInMemory();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
System.out.println("Reading Cell Sample file: " + cellFile);
|
||||||
|
CellFileReader cellReader = new CellFileReader(cellFile);
|
||||||
|
cellSample = cellReader.getCellSample();
|
||||||
|
if(BiGpairSEQ.cacheCells()) {
|
||||||
|
BiGpairSEQ.setCellSampleInMemory(cellSample, cellFile);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
assert plateFile != null;
|
assert plateFile != null;
|
||||||
PlateFileReader plateReader = new PlateFileReader(plateFile);
|
Plate plate;
|
||||||
Plate plate = new Plate(plateReader.getFilename(), plateReader.getWells());
|
//check if plate is already in memory
|
||||||
if (cellReader.getCells().size() == 0){
|
if(plateFile.equals(BiGpairSEQ.getPlateFilename())){
|
||||||
|
plate = BiGpairSEQ.getPlateInMemory();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
System.out.println("Reading Sample Plate file: " + plateFile);
|
||||||
|
PlateFileReader plateReader = new PlateFileReader(plateFile);
|
||||||
|
plate = plateReader.getSamplePlate();
|
||||||
|
if(BiGpairSEQ.cachePlate()) {
|
||||||
|
BiGpairSEQ.setPlateInMemory(plate, plateFile);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (cellSample.getCells().size() == 0){
|
||||||
System.out.println("No cell sample found.");
|
System.out.println("No cell sample found.");
|
||||||
System.out.println("Returning to main menu.");
|
System.out.println("Returning to main menu.");
|
||||||
}
|
}
|
||||||
@@ -228,13 +338,22 @@ public class InteractiveInterface {
|
|||||||
System.out.println("Returning to main menu.");
|
System.out.println("Returning to main menu.");
|
||||||
}
|
}
|
||||||
else{
|
else{
|
||||||
List<Integer[]> cells = cellReader.getCells();
|
GraphWithMapData data = Simulator.makeCDR3Graph(cellSample, plate, readDepth, readErrorRate, errorCollisionRate, true);
|
||||||
GraphWithMapData data = Simulator.makeGraph(cells, plate, true);
|
|
||||||
assert filename != null;
|
assert filename != null;
|
||||||
GraphDataObjectWriter dataWriter = new GraphDataObjectWriter(filename, data);
|
if(BiGpairSEQ.outputBinary()) {
|
||||||
dataWriter.writeDataToFile();
|
GraphDataObjectWriter dataWriter = new GraphDataObjectWriter(filename, data);
|
||||||
System.out.println("Graph and Data file written to: " + filename);
|
dataWriter.writeDataToFile();
|
||||||
System.gc();
|
System.out.println("Serialized binary graph/data file written to: " + filename);
|
||||||
|
}
|
||||||
|
if(BiGpairSEQ.outputGraphML()) {
|
||||||
|
GraphMLFileWriter graphMLWriter = new GraphMLFileWriter(filename, data);
|
||||||
|
graphMLWriter.writeGraphToFile();
|
||||||
|
System.out.println("GraphML file written to: " + filename);
|
||||||
|
}
|
||||||
|
if(BiGpairSEQ.cacheGraph()) {
|
||||||
|
BiGpairSEQ.setGraphInMemory(data, filename);
|
||||||
|
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -256,17 +375,28 @@ public class InteractiveInterface {
|
|||||||
System.out.println("\nWhat is the minimum number of CDR3 alpha/beta overlap wells to attempt matching?");
|
System.out.println("\nWhat is the minimum number of CDR3 alpha/beta overlap wells to attempt matching?");
|
||||||
lowThreshold = sc.nextInt();
|
lowThreshold = sc.nextInt();
|
||||||
if(lowThreshold < 1){
|
if(lowThreshold < 1){
|
||||||
throw new InputMismatchException("Minimum value for low threshold set to 1");
|
lowThreshold = 1;
|
||||||
|
System.out.println("Value for low occupancy overlap threshold must be positive");
|
||||||
|
System.out.println("Value for low occupancy overlap threshold set to 1");
|
||||||
}
|
}
|
||||||
System.out.println("\nWhat is the maximum number of CDR3 alpha/beta overlap wells to attempt matching?");
|
System.out.println("\nWhat is the maximum number of CDR3 alpha/beta overlap wells to attempt matching?");
|
||||||
highThreshold = sc.nextInt();
|
highThreshold = sc.nextInt();
|
||||||
System.out.println("\nWhat is the maximum difference in alpha/beta occupancy to attempt matching?");
|
if(highThreshold < lowThreshold) {
|
||||||
maxOccupancyDiff = sc.nextInt();
|
highThreshold = lowThreshold;
|
||||||
System.out.println("\nWell overlap percentage = pair overlap / sequence occupancy");
|
System.out.println("Value for high occupancy overlap threshold must be >= low overlap threshold");
|
||||||
System.out.println("What is the minimum well overlap percentage to attempt matching? (0 to 100)");
|
System.out.println("Value for high occupancy overlap threshold set to " + lowThreshold);
|
||||||
|
}
|
||||||
|
System.out.println("What is the minimum percentage of a sequence's wells in alpha/beta overlap to attempt matching? (0 - 100)");
|
||||||
minOverlapPercent = sc.nextInt();
|
minOverlapPercent = sc.nextInt();
|
||||||
if (minOverlapPercent < 0 || minOverlapPercent > 100) {
|
if (minOverlapPercent < 0 || minOverlapPercent > 100) {
|
||||||
throw new InputMismatchException("Value outside range. Minimum percent set to 0");
|
System.out.println("Value outside range. Minimum occupancy overlap percentage set to 0");
|
||||||
|
}
|
||||||
|
System.out.println("\nWhat is the maximum difference in alpha/beta occupancy to attempt matching?");
|
||||||
|
maxOccupancyDiff = sc.nextInt();
|
||||||
|
if (maxOccupancyDiff < 0) {
|
||||||
|
maxOccupancyDiff = 0;
|
||||||
|
System.out.println("Maximum allowable difference in alpha/beta occupancy must be nonnegative");
|
||||||
|
System.out.println("Maximum allowable difference in alpha/beta occupancy set to 0");
|
||||||
}
|
}
|
||||||
} catch (InputMismatchException ex) {
|
} catch (InputMismatchException ex) {
|
||||||
System.out.println(ex);
|
System.out.println(ex);
|
||||||
@@ -275,17 +405,15 @@ public class InteractiveInterface {
|
|||||||
assert graphFilename != null;
|
assert graphFilename != null;
|
||||||
//check if this is the same graph we already have in memory.
|
//check if this is the same graph we already have in memory.
|
||||||
GraphWithMapData data;
|
GraphWithMapData data;
|
||||||
if(!(graphFilename.equals(BiGpairSEQ.getGraphFilename())) || BiGpairSEQ.getGraph() == null) {
|
if(graphFilename.equals(BiGpairSEQ.getGraphFilename())) {
|
||||||
BiGpairSEQ.clearGraph();
|
data = BiGpairSEQ.getGraphInMemory();
|
||||||
//read object data from file
|
|
||||||
GraphDataObjectReader dataReader = new GraphDataObjectReader(graphFilename);
|
|
||||||
data = dataReader.getData();
|
|
||||||
//set new graph in memory and new filename
|
|
||||||
BiGpairSEQ.setGraph(data);
|
|
||||||
BiGpairSEQ.setGraphFilename(graphFilename);
|
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
data = BiGpairSEQ.getGraph();
|
GraphDataObjectReader dataReader = new GraphDataObjectReader(graphFilename, true);
|
||||||
|
data = dataReader.getData();
|
||||||
|
if(BiGpairSEQ.cacheGraph()) {
|
||||||
|
BiGpairSEQ.setGraphInMemory(data, graphFilename);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
//simulate matching
|
//simulate matching
|
||||||
MatchingResult results = Simulator.matchCDR3s(data, graphFilename, lowThreshold, highThreshold, maxOccupancyDiff,
|
MatchingResult results = Simulator.matchCDR3s(data, graphFilename, lowThreshold, highThreshold, maxOccupancyDiff,
|
||||||
@@ -296,7 +424,6 @@ public class InteractiveInterface {
|
|||||||
System.out.println("Writing results to file");
|
System.out.println("Writing results to file");
|
||||||
writer.writeResultsToFile();
|
writer.writeResultsToFile();
|
||||||
System.out.println("Results written to file: " + filename);
|
System.out.println("Results written to file: " + filename);
|
||||||
System.gc();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
///////
|
///////
|
||||||
@@ -403,7 +530,82 @@ public class InteractiveInterface {
|
|||||||
// }
|
// }
|
||||||
// }
|
// }
|
||||||
|
|
||||||
|
private static void mainOptions(){
|
||||||
|
boolean backToMain = false;
|
||||||
|
while(!backToMain) {
|
||||||
|
System.out.println("\n--------------OPTIONS---------------");
|
||||||
|
System.out.println("1) Turn " + getOnOff(!BiGpairSEQ.cacheCells()) + " cell sample file caching");
|
||||||
|
System.out.println("2) Turn " + getOnOff(!BiGpairSEQ.cachePlate()) + " plate file caching");
|
||||||
|
System.out.println("3) Turn " + getOnOff(!BiGpairSEQ.cacheGraph()) + " graph/data file caching");
|
||||||
|
System.out.println("4) Turn " + getOnOff(!BiGpairSEQ.outputBinary()) + " serialized binary graph output");
|
||||||
|
System.out.println("5) Turn " + getOnOff(!BiGpairSEQ.outputGraphML()) + " GraphML graph output (for data portability to other programs)");
|
||||||
|
System.out.println("6) Maximum weight matching algorithm options");
|
||||||
|
System.out.println("0) Return to main menu");
|
||||||
|
try {
|
||||||
|
input = sc.nextInt();
|
||||||
|
switch (input) {
|
||||||
|
case 1 -> BiGpairSEQ.setCacheCells(!BiGpairSEQ.cacheCells());
|
||||||
|
case 2 -> BiGpairSEQ.setCachePlate(!BiGpairSEQ.cachePlate());
|
||||||
|
case 3 -> BiGpairSEQ.setCacheGraph(!BiGpairSEQ.cacheGraph());
|
||||||
|
case 4 -> BiGpairSEQ.setOutputBinary(!BiGpairSEQ.outputBinary());
|
||||||
|
case 5 -> BiGpairSEQ.setOutputGraphML(!BiGpairSEQ.outputGraphML());
|
||||||
|
case 6 -> algorithmOptions();
|
||||||
|
case 0 -> backToMain = true;
|
||||||
|
default -> System.out.println("Invalid input");
|
||||||
|
}
|
||||||
|
} catch (InputMismatchException ex) {
|
||||||
|
System.out.println(ex);
|
||||||
|
sc.next();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper function for printing menu items in mainOptions(). Returns a string based on the value of parameter.
|
||||||
|
*
|
||||||
|
* @param b - a boolean value
|
||||||
|
* @return String "on" if b is true, "off" if b is false
|
||||||
|
*/
|
||||||
|
private static String getOnOff(boolean b) {
|
||||||
|
if (b) { return "on";}
|
||||||
|
else { return "off"; }
|
||||||
|
}
|
||||||
|
|
||||||
|
private static void algorithmOptions(){
|
||||||
|
boolean backToOptions = false;
|
||||||
|
while(!backToOptions) {
|
||||||
|
System.out.println("\n---------ALGORITHM OPTIONS----------");
|
||||||
|
System.out.println("1) Use scaling algorithm by Duan and Su.");
|
||||||
|
System.out.println("2) Use LEDA book algorithm with Fibonacci heap priority queue");
|
||||||
|
System.out.println("3) Use LEDA book algorithm with pairing heap priority queue");
|
||||||
|
System.out.println("0) Return to Options menu");
|
||||||
|
try {
|
||||||
|
input = sc.nextInt();
|
||||||
|
switch (input) {
|
||||||
|
case 1 -> System.out.println("This option is not yet implemented. Choose another.");
|
||||||
|
case 2 -> {
|
||||||
|
BiGpairSEQ.setFibonacciHeap();
|
||||||
|
System.out.println("MWM algorithm set to LEDA with Fibonacci heap");
|
||||||
|
backToOptions = true;
|
||||||
|
}
|
||||||
|
case 3 -> {
|
||||||
|
BiGpairSEQ.setPairingHeap();
|
||||||
|
System.out.println("MWM algorithm set to LEDA with pairing heap");
|
||||||
|
backToOptions = true;
|
||||||
|
}
|
||||||
|
case 0 -> backToOptions = true;
|
||||||
|
default -> System.out.println("Invalid input");
|
||||||
|
}
|
||||||
|
} catch (InputMismatchException ex) {
|
||||||
|
System.out.println(ex);
|
||||||
|
sc.next();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private static void acknowledge(){
|
private static void acknowledge(){
|
||||||
|
System.out.println("BiGpairSEQ_Sim " + BiGpairSEQ.getVersion());
|
||||||
|
System.out.println();
|
||||||
System.out.println("This program simulates BiGpairSEQ, a graph theory based adaptation");
|
System.out.println("This program simulates BiGpairSEQ, a graph theory based adaptation");
|
||||||
System.out.println("of the pairSEQ algorithm for pairing T cell receptor sequences.");
|
System.out.println("of the pairSEQ algorithm for pairing T cell receptor sequences.");
|
||||||
System.out.println();
|
System.out.println();
|
||||||
|
|||||||
@@ -9,27 +9,34 @@ public class MatchingResult {
     private final List<String> comments;
     private final List<String> headers;
     private final List<List<String>> allResults;
-    private final Map<Integer, Integer> matchMap;
-    private final Duration time;
+    private final Map<String, String> matchMap;

     public MatchingResult(Map<String, String> metadata, List<String> headers,
-                          List<List<String>> allResults, Map<Integer, Integer>matchMap, Duration time){
+                          List<List<String>> allResults, Map<String, String>matchMap){
        /*
        * POSSIBLE KEYS FOR METADATA MAP ARE:
        * sample plate filename *
        * graph filename *
+       * matching weight *
        * well populations *
-       * total alphas found *
-       * total betas found *
-       * high overlap threshold
-       * low overlap threshold
-       * maximum occupancy difference
-       * minimum overlap percent
-       * pairing attempt rate
-       * correct pairing count
-       * incorrect pairing count
-       * pairing error rate
-       * simulation time
+       * sequence read depth *
+       * sequence read error rate *
+       * read error collision rate *
+       * total alphas read from plate *
+       * total betas read from plate *
+       * alphas in graph (after pre-filtering) *
+       * betas in graph (after pre-filtering) *
+       * high overlap threshold for pairing *
+       * low overlap threshold for pairing *
+       * maximum occupancy difference for pairing *
+       * minimum overlap percent for pairing *
+       * pairing attempt rate *
+       * correct pairing count *
+       * incorrect pairing count *
+       * pairing error rate *
+       * time to generate graph (seconds) *
+       * time to pair sequences (seconds) *
+       * total simulation time (seconds) *
        */
        this.metadata = metadata;
        this.comments = new ArrayList<>();
@@ -39,8 +46,6 @@ public class MatchingResult {
        this.headers = headers;
        this.allResults = allResults;
        this.matchMap = matchMap;
-       this.time = time;
-
     }

     public Map<String, String> getMetadata() {return metadata;}
@@ -57,13 +62,13 @@ public class MatchingResult {
         return headers;
     }

-    public Map<Integer, Integer> getMatchMap() {
+    public Map<String, String> getMatchMap() {
         return matchMap;
     }

-    public Duration getTime() {
-        return time;
-    }
+    // public Duration getTime() {
+    //     return time;
+    // }

     public String getPlateFilename() {
         return metadata.get("sample plate filename");
@@ -84,13 +89,29 @@ public class MatchingResult {
     }

     public Integer getAlphaCount() {
-        return Integer.parseInt(metadata.get("total alpha count"));
+        return Integer.parseInt(metadata.get("total alphas read from plate"));
     }

     public Integer getBetaCount() {
-        return Integer.parseInt(metadata.get("total beta count"));
+        return Integer.parseInt(metadata.get("total betas read from plate"));
     }

-    //put in the rest of these methods following the same pattern
+    public Integer getHighOverlapThreshold() { return Integer.parseInt(metadata.get("high overlap threshold for pairing"));}
+
+    public Integer getLowOverlapThreshold() { return Integer.parseInt(metadata.get("low overlap threshold for pairing"));}
+
+    public Integer getMaxOccupancyDifference() { return Integer.parseInt(metadata.get("maximum occupancy difference for pairing"));}
+
+    public Integer getMinOverlapPercent() { return Integer.parseInt(metadata.get("minimum overlap percent for pairing"));}
+
+    public Double getPairingAttemptRate() { return Double.parseDouble(metadata.get("pairing attempt rate"));}
+
+    public Integer getCorrectPairingCount() { return Integer.parseInt(metadata.get("correct pairing count"));}
+
+    public Integer getIncorrectPairingCount() { return Integer.parseInt(metadata.get("incorrect pairing count"));}
+
+    public Double getPairingErrorRate() { return Double.parseDouble(metadata.get("pairing error rate"));}
+
+    public String getSimulationTime() { return metadata.get("total simulation time (seconds)"); }
+
 }
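A quick illustration (not part of the change set) of what the new metadata-backed getters enable. It assumes the corresponding metadata keys were populated before the result was constructed, since the getters parse the stored strings directly; the helper class and method names are illustrative.

```java
// Prints a pairing summary from a MatchingResult using the getters added above.
public class ResultSummary {
    static void print(MatchingResult results) {
        System.out.println("Pairing attempt rate: " + results.getPairingAttemptRate());
        System.out.println("Correct pairings:     " + results.getCorrectPairingCount());
        System.out.println("Incorrect pairings:   " + results.getIncorrectPairingCount());
        System.out.println("Pairing error rate:   " + results.getPairingErrorRate());
        System.out.println("Simulation time (s):  " + results.getSimulationTime());
    }
}
```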
@@ -5,12 +5,15 @@ TODO: Implement exponential distribution using inversion method - DONE
 TODO: Implement discrete frequency distributions using Vose's Alias Method
 */

 import java.util.*;

 public class Plate {
+    private CellSample cells;
     private String sourceFile;
-    private List<List<Integer[]>> wells;
-    private Random rand = new Random();
+    private String filename;
+    private List<List<String[]>> wells;
+    private final Random rand = BiGpairSEQ.getRand();
     private int size;
     private double error;
     private Integer[] populations;
@@ -18,6 +21,25 @@ public class Plate {
     private double lambda;
     boolean exponential = false;

+    public Plate(CellSample cells, String cellFilename, int numWells, Integer[] populations,
+                 double dropoutRate, double stdDev_or_lambda, boolean exponential){
+        this.cells = cells;
+        this.sourceFile = cellFilename;
+        this.size = numWells;
+        this.wells = new ArrayList<>();
+        this.error = dropoutRate;
+        this.populations = populations;
+        this.exponential = exponential;
+        if (this.exponential) {
+            this.lambda = stdDev_or_lambda;
+            fillWellsExponential(cells.getCells(), this.lambda);
+        }
+        else {
+            this.stdDev = stdDev_or_lambda;
+            fillWells(cells.getCells(), this.stdDev);
+        }
+    }
+
     public Plate(int size, double error, Integer[] populations) {
         this.size = size;
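For reference, the two call sites added in `InteractiveInterface.makePlate()` use this constructor as sketched below. The wrapper method is illustrative; only the `new Plate(...)` calls are taken from this change set. The final flag selects the sampling model, and the sixth argument is read as lambda when it is true and as the gaussian standard deviation otherwise.

```java
// Minimal sketch of the new Plate constructor in use (wrapper is illustrative).
public class PlateConstruction {
    static Plate build(CellSample cells, String cellFile, int numWells, Integer[] populations,
                       double dropOutRate, double stdDevOrLambda, boolean exponential) {
        // exponential == true  -> stdDevOrLambda is the exponential lambda
        // exponential == false -> stdDevOrLambda is the gaussian standard deviation
        return new Plate(cells, cellFile, numWells, populations, dropOutRate, stdDevOrLambda, exponential);
    }
}
```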
@@ -26,13 +48,14 @@ public class Plate {
|
|||||||
wells = new ArrayList<>();
|
wells = new ArrayList<>();
|
||||||
}
|
}
|
||||||
|
|
||||||
public Plate(String sourceFileName, List<List<Integer[]>> wells) {
|
//constructor for returning a Plate from a PlateFileReader
|
||||||
this.sourceFile = sourceFileName;
|
public Plate(String filename, List<List<String[]>> wells) {
|
||||||
|
this.filename = filename;
|
||||||
this.wells = wells;
|
this.wells = wells;
|
||||||
this.size = wells.size();
|
this.size = wells.size();
|
||||||
|
|
||||||
List<Integer> concentrations = new ArrayList<>();
|
List<Integer> concentrations = new ArrayList<>();
|
||||||
for (List<Integer[]> w: wells) {
|
for (List<String[]> w: wells) {
|
||||||
if(!concentrations.contains(w.size())){
|
if(!concentrations.contains(w.size())){
|
||||||
concentrations.add(w.size());
|
concentrations.add(w.size());
|
||||||
}
|
}
|
||||||
@@ -43,35 +66,26 @@ public class Plate {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public void fillWellsExponential(String sourceFileName, List<Integer[]> cells, double lambda){
|
private void fillWellsExponential(List<String[]> cells, double lambda){
|
||||||
this.lambda = lambda;
|
this.lambda = lambda;
|
||||||
exponential = true;
|
exponential = true;
|
||||||
sourceFile = sourceFileName;
|
|
||||||
int numSections = populations.length;
|
int numSections = populations.length;
|
||||||
int section = 0;
|
int section = 0;
|
||||||
double m;
|
double m;
|
||||||
int n;
|
int n;
|
||||||
int test=0;
|
|
||||||
while (section < numSections){
|
while (section < numSections){
|
||||||
for (int i = 0; i < (size / numSections); i++) {
|
for (int i = 0; i < (size / numSections); i++) {
|
||||||
List<Integer[]> well = new ArrayList<>();
|
List<String[]> well = new ArrayList<>();
|
||||||
for (int j = 0; j < populations[section]; j++) {
|
for (int j = 0; j < populations[section]; j++) {
|
||||||
do {
|
do {
|
||||||
//inverse transform sampling: for random number u in [0,1), x = log(1-u) / (-lambda)
|
//inverse transform sampling: for random number u in [0,1), x = log(1-u) / (-lambda)
|
||||||
m = (Math.log10((1 - rand.nextDouble()))/(-lambda)) * Math.sqrt(cells.size());
|
m = (Math.log10((1 - rand.nextDouble()))/(-lambda)) * Math.sqrt(cells.size());
|
||||||
} while (m >= cells.size() || m < 0);
|
} while (m >= cells.size() || m < 0);
|
||||||
n = (int) Math.floor(m);
|
n = (int) Math.floor(m);
|
||||||
//n = Equations.getRandomNumber(0, cells.size());
|
String[] cellToAdd = cells.get(n).clone();
|
||||||
// was testing generating the cell sample file with exponential dist, then sampling flat here
|
|
||||||
//that would be more realistic
|
|
||||||
//But would mess up other things in the simulation with how I've coded it.
|
|
||||||
if(n > test){
|
|
||||||
test = n;
|
|
||||||
}
|
|
||||||
Integer[] cellToAdd = cells.get(n).clone();
|
|
||||||
for(int k = 0; k < cellToAdd.length; k++){
|
for(int k = 0; k < cellToAdd.length; k++){
|
||||||
if(Math.abs(rand.nextDouble()) < error){//error applied to each seqeunce
|
if(Math.abs(rand.nextDouble()) <= error){//error applied to each sequence
|
||||||
cellToAdd[k] = -1;
|
cellToAdd[k] = "-1";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
well.add(cellToAdd);
|
well.add(cellToAdd);
|
||||||
@@ -80,28 +94,26 @@ public class Plate {
|
|||||||
}
|
}
|
||||||
section++;
|
section++;
|
||||||
}
|
}
|
||||||
System.out.println("Highest index: " +test);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void fillWells(String sourceFileName, List<Integer[]> cells, double stdDev) {
|
private void fillWells( List<String[]> cells, double stdDev) {
|
||||||
this.stdDev = stdDev;
|
this.stdDev = stdDev;
|
||||||
sourceFile = sourceFileName;
|
|
||||||
int numSections = populations.length;
|
int numSections = populations.length;
|
||||||
int section = 0;
|
int section = 0;
|
||||||
double m;
|
double m;
|
||||||
int n;
|
int n;
|
||||||
while (section < numSections){
|
while (section < numSections){
|
||||||
for (int i = 0; i < (size / numSections); i++) {
|
for (int i = 0; i < (size / numSections); i++) {
|
||||||
List<Integer[]> well = new ArrayList<>();
|
List<String[]> well = new ArrayList<>();
|
||||||
for (int j = 0; j < populations[section]; j++) {
|
for (int j = 0; j < populations[section]; j++) {
|
||||||
do {
|
do {
|
||||||
m = (rand.nextGaussian() * stdDev) + (cells.size() / 2);
|
m = (rand.nextGaussian() * stdDev) + (cells.size() / 2);
|
||||||
} while (m >= cells.size() || m < 0);
|
} while (m >= cells.size() || m < 0);
|
||||||
n = (int) Math.floor(m);
|
n = (int) Math.floor(m);
|
||||||
Integer[] cellToAdd = cells.get(n).clone();
|
String[] cellToAdd = cells.get(n).clone();
|
||||||
for(int k = 0; k < cellToAdd.length; k++){
|
for(int k = 0; k < cellToAdd.length; k++){
|
||||||
if(Math.abs(rand.nextDouble()) < error){//error applied to each sequence
|
if(Math.abs(rand.nextDouble()) < error){//error applied to each sequence
|
||||||
cellToAdd[k] = -1;
|
cellToAdd[k] = "-1";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
well.add(cellToAdd);
|
well.add(cellToAdd);
|
||||||
@@ -132,40 +144,188 @@ public class Plate {
|
|||||||
return error;
|
return error;
|
||||||
}
|
}
|
||||||
|
|
||||||
public List<List<Integer[]>> getWells() {
|
public List<List<String[]>> getWells() {
|
||||||
return wells;
|
return wells;
|
||||||
}
|
}
|
||||||
|
|
||||||
//returns a map of the counts of the sequence at cell index sIndex, in all wells
|
// //returns a map of the counts of the sequence at cell index sIndex, in all wells
|
||||||
public Map<Integer, Integer> assayWellsSequenceS(int... sIndices){
|
// public void assayWellsSequenceS(Map<String, Integer> sequences, int... sIndices){
|
||||||
return this.assayWellsSequenceS(0, size, sIndices);
|
// this.assayWellsSequenceS(sequences, 0, size, sIndices);
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
|
// //returns a map of the counts of the sequence at cell index sIndex, in a specific well
|
||||||
|
// public void assayWellsSequenceS(Map<String, Integer> sequences, int n, int... sIndices) {
|
||||||
|
// this.assayWellsSequenceS(sequences, n, n+1, sIndices);
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// //returns a map of the counts of the sequence at cell index sIndex, in a range of wells
|
||||||
|
// public void assayWellsSequenceS(Map<String, Integer> sequences, int start, int end, int... sIndices) {
|
||||||
|
// for(int sIndex: sIndices){
|
||||||
|
// for(int i = start; i < end; i++){
|
||||||
|
// countSequences(sequences, wells.get(i), sIndex);
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// //For the sequences at cell indices sIndices, counts number of unique sequences in the given well into the given map
|
||||||
|
// private void countSequences(Map<String, Integer> wellMap, List<String[]> well, int... sIndices) {
|
||||||
|
// for(String[] cell : well) {
|
||||||
|
// for(int sIndex: sIndices){
|
||||||
|
// //skip dropout sequences, which have value -1
|
||||||
|
// if(!"-1".equals(cell[sIndex])){
|
||||||
|
// wellMap.merge(cell[sIndex], 1, (oldValue, newValue) -> oldValue + newValue);
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
//returns a map of the counts of the sequence at cell index sIndex, in a specific well
|
//For the sequences at cell indices sIndices, counts number of unique sequences in all well into the given map
|
||||||
public Map<Integer, Integer> assayWellsSequenceS(int n, int... sIndices) { return this.assayWellsSequenceS(n, n+1, sIndices);}
|
public Map<String, SequenceRecord> countSequences(Integer readDepth, Double readErrorRate,
|
||||||
|
Double errorCollisionRate, int... sIndices) {
|
||||||
//returns a map of the counts of the sequence at cell index sIndex, in a range of wells
|
SequenceType[] sequenceTypes = EnumSet.allOf(SequenceType.class).toArray(new SequenceType[0]);
|
||||||
public Map<Integer, Integer> assayWellsSequenceS(int start, int end, int... sIndices) {
|
Map<String, Integer> distinctMisreadCounts = new HashMap<>();
|
||||||
Map<Integer,Integer> assay = new HashMap<>();
|
Map<String, SequenceRecord> sequenceMap = new LinkedHashMap<>();
|
||||||
for(int pIndex: sIndices){
|
for (int well = 0; well < size; well++) {
|
||||||
for(int i = start; i < end; i++){
|
for (String[] cell : wells.get(well)) {
|
||||||
countSequences(assay, wells.get(i), pIndex);
|
for (int sIndex : sIndices) {
|
||||||
}
|
//skip dropout sequences, which have value -1
|
||||||
}
|
if (!"-1".equals(cell[sIndex])) {
|
||||||
return assay;
|
for (int j = 0; j < readDepth; j++) {
|
||||||
}
|
//Misread sequence
|
||||||
//For the sequences at cell indices sIndices, counts number of unique sequences in the given well into the given map
|
if (rand.nextDouble() < readErrorRate) {
|
||||||
private void countSequences(Map<Integer, Integer> wellMap, List<Integer[]> well, int... sIndices) {
|
StringBuilder spurious = new StringBuilder(cell[sIndex]);
|
||||||
for(Integer[] cell : well) {
|
//if this sequence hasn't been misread before, or the read error is unique,
|
||||||
for(int sIndex: sIndices){
|
//append one more "*" than has been appended before
|
||||||
if(cell[sIndex] != -1){
|
if (rand.nextDouble() > errorCollisionRate || !distinctMisreadCounts.containsKey(cell[sIndex])) {
|
||||||
wellMap.merge(cell[sIndex], 1, (oldValue, newValue) -> oldValue + newValue);
|
distinctMisreadCounts.merge(cell[sIndex], 1, (oldValue, newValue) -> oldValue + newValue);
|
||||||
|
for (int k = 0; k < distinctMisreadCounts.get(cell[sIndex]); k++) {
|
||||||
|
spurious.append("*");
|
||||||
|
}
|
||||||
|
SequenceRecord tmp = new SequenceRecord(spurious.toString(), sequenceTypes[sIndex]);
|
||||||
|
tmp.addRead(well);
|
||||||
|
sequenceMap.put(spurious.toString(), tmp);
|
||||||
|
}
|
||||||
|
//if this is a read error collision, randomly choose a number of "*"s that has been appended before
|
||||||
|
else {
|
||||||
|
int starCount = rand.nextInt(distinctMisreadCounts.get(cell[sIndex]));
|
||||||
|
for (int k = 0; k < starCount; k++) {
|
||||||
|
spurious.append("*");
|
||||||
|
}
|
||||||
|
sequenceMap.get(spurious.toString()).addRead(well);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
//sequence is read correctly
|
||||||
|
else {
|
||||||
|
if (!sequenceMap.containsKey(cell[sIndex])) {
|
||||||
|
SequenceRecord tmp = new SequenceRecord(cell[sIndex], sequenceTypes[sIndex]);
|
||||||
|
tmp.addRead(well);
|
||||||
|
sequenceMap.put(cell[sIndex], tmp);
|
||||||
|
} else {
|
||||||
|
sequenceMap.get(cell[sIndex]).addRead(well);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
return sequenceMap;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// //returns a map of the counts of the sequence at cell index sIndex, in all wells
|
||||||
|
// //Simulates read depth and read errors, counts the number of reads of a unique sequence into the given map.
|
||||||
|
// public void assayWellsSequenceSWithReadDepth(Map<String, Integer> misreadCounts, Map<String, Integer> occupancyMap, Map<String, Integer> readCountMap,
|
||||||
|
// int readDepth, double readErrorProb, double errorCollisionProb, int... sIndices) {
|
||||||
|
// this.assayWellsSequenceSWithReadDepth(misreadCounts, occupancyMap, readCountMap, readDepth, readErrorProb, errorCollisionProb, 0, size, sIndices);
|
||||||
|
// }
|
||||||
|
// //returns a map of the counts of the sequence at cell index sIndex, in a specific of wells
|
||||||
|
// //Simulates read depth and read errors, counts the number of reads of a unique sequence into the given map.
|
||||||
|
// public void assayWellsSequenceSWithReadDepth(Map<String, Integer> misreadCounts, Map<String, Integer> occupancyMap, Map<String, Integer> readCountMap,
|
||||||
|
// int readDepth, double readErrorProb, double errorCollisionProb,
|
||||||
|
// int n, int... sIndices) {
|
||||||
|
// this.assayWellsSequenceSWithReadDepth(misreadCounts, occupancyMap, readCountMap, readDepth, readErrorProb, errorCollisionProb, n, n+1, sIndices);
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// //returns a map of the counts of the sequence at cell index sIndex, in a range of wells
|
||||||
|
// //Simulates read depth and read errors, counts the number of reads of a unique sequence into the given map.
|
||||||
|
// public void assayWellsSequenceSWithReadDepth(Map<String, Integer> misreadCounts, Map<String, Integer> occupancyMap, Map<String, Integer> readCountMap,
|
||||||
|
// int readDepth, double readErrorProb, double errorCollisionProb,
|
||||||
|
// int start, int end, int... sIndices) {
|
||||||
|
// for(int sIndex: sIndices){
|
||||||
|
// for(int i = start; i < end; i++){
|
||||||
|
// countSequencesWithReadDepth(misreadCounts, occupancyMap, readCountMap, readDepth, readErrorProb, errorCollisionProb, wells.get(i), sIndex);
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// //For the sequences at cell indices sIndices, counts number of unique sequences in the given well into the given map
|
||||||
|
// //Simulates read depth and read errors, counts the number of reads of a unique sequence into the given map.
|
||||||
|
// //NOTE: this function changes the content of the well, adding spurious cells to contain the misread sequences
|
||||||
|
// //(this is necessary because, in the simulation, the plate is read multiple times, but random misreads can only
|
||||||
|
// //be simulated once).
|
||||||
|
// //(Possibly I should refactor all of this to only require a single plate assay, to speed things up. Or at least
|
||||||
|
// //to see if it would speed things up.)
|
||||||
|
// private void countSequencesWithReadDepth(Map<String, Integer> distinctMisreadCounts, Map<String, Integer> occupancyMap, Map<String, Integer> readCountMap,
|
||||||
|
// int readDepth, double readErrorProb, double errorCollisionProb,
|
||||||
|
// List<String[]> well, int... sIndices) {
|
||||||
|
// //list of spurious cells to add to well after counting
|
||||||
|
// List<String[]> spuriousCells = new ArrayList<>();
|
||||||
|
// for(String[] cell : well) {
|
||||||
|
// //new potential spurious cell for each cell that gets read
|
||||||
|
// String[] spuriousCell = new String[SequenceType.values().length];
|
||||||
|
// //initialize spurious cell with all dropout sequences
|
||||||
|
// Arrays.fill(spuriousCell, "-1");
|
||||||
|
// //has a read error occurred?
|
||||||
|
// boolean readError = false;
|
||||||
|
// for(int sIndex: sIndices){
|
||||||
|
// //skip dropout sequences, which have value "-1"
|
||||||
|
// if(!"-1".equals(cell[sIndex])){
|
||||||
|
// Map<String, Integer> sequencesWithReadCounts = new LinkedHashMap<>();
|
||||||
|
// for(int i = 0; i < readDepth; i++) {
|
||||||
|
// if (rand.nextDouble() <= readErrorProb) {
|
||||||
|
// readError = true;
|
||||||
|
// //Read errors are represented by appending "*"s to the end of the sequence some number of times
|
||||||
|
// StringBuilder spurious = new StringBuilder(cell[sIndex]);
|
||||||
|
// //if this sequence hasn't been misread before, or the read error is unique,
|
||||||
|
// //append one more "*" than has been appended before
|
||||||
|
// if (!distinctMisreadCounts.containsKey(cell[sIndex]) || rand.nextDouble() > errorCollisionProb) {
|
||||||
|
// distinctMisreadCounts.merge(cell[sIndex], 1, (oldValue, newValue) -> oldValue + newValue);
|
||||||
|
// for (int j = 0; j < distinctMisreadCounts.get(cell[sIndex]); j++) {
|
||||||
|
// spurious.append("*");
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// //if this is a read error collision, randomly choose a number of "*"s that has been appended before
|
||||||
|
// else {
|
||||||
|
// int starCount = rand.nextInt(distinctMisreadCounts.get(cell[sIndex]));
|
||||||
|
// for (int j = 0; j < starCount; j++) {
|
||||||
|
// spurious.append("*");
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// sequencesWithReadCounts.merge(spurious.toString(), 1, (oldValue, newValue) -> oldValue + newValue);
|
||||||
|
// //add spurious sequence to spurious cell
|
||||||
|
// spuriousCell[sIndex] = spurious.toString();
|
||||||
|
// }
|
||||||
|
// else {
|
||||||
|
// sequencesWithReadCounts.merge(cell[sIndex], 1, (oldValue, newValue) -> oldValue + newValue);
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// for(String seq : sequencesWithReadCounts.keySet()) {
|
||||||
|
// occupancyMap.merge(seq, 1, (oldValue, newValue) -> oldValue + newValue);
|
||||||
|
// readCountMap.merge(seq, sequencesWithReadCounts.get(seq), (oldValue, newValue) -> oldValue + newValue);
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// if (readError) { //only add a new spurious cell if there was a read error
|
||||||
|
// spuriousCells.add(spuriousCell);
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// //add all spurious cells to the well
|
||||||
|
// well.addAll(spuriousCells);
|
||||||
|
// }
|
||||||
|
|
||||||
public String getSourceFileName() {
|
public String getSourceFileName() {
|
||||||
return sourceFile;
|
return sourceFile;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public String getFilename() { return filename; }
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ import java.util.regex.Pattern;

 public class PlateFileReader {

-    private List<List<Integer[]>> wells = new ArrayList<>();
+    private List<List<String[]>> wells = new ArrayList<>();
     private String filename;

     public PlateFileReader(String filename){
@@ -32,17 +32,17 @@ public class PlateFileReader {
             CSVParser parser = new CSVParser(reader, plateFileFormat);
         ){
             for(CSVRecord record: parser.getRecords()) {
-                List<Integer[]> well = new ArrayList<>();
+                List<String[]> well = new ArrayList<>();
                 for(String s: record) {
                     if(!"".equals(s)) {
-                        String[] intString = s.replaceAll("\\[", "")
+                        String[] sequences = s.replaceAll("\\[", "")
                                 .replaceAll("]", "")
                                 .replaceAll(" ", "")
                                 .split(",");
-                        //System.out.println(intString);
-                        Integer[] arr = new Integer[intString.length];
-                        for (int i = 0; i < intString.length; i++) {
-                            arr[i] = Integer.valueOf(intString[i]);
+                        //System.out.println(sequences);
+                        String[] arr = new String[sequences.length];
+                        for (int i = 0; i < sequences.length; i++) {
+                            arr[i] = sequences[i];
                         }
                         well.add(arr);
                     }
@@ -56,11 +56,8 @@ public class PlateFileReader {

     }

-    public List<List<Integer[]>> getWells() {
-        return wells;
+    public Plate getSamplePlate() {
+        return new Plate(filename, wells);
     }

-    public String getFilename() {
-        return filename;
-    }
 }
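The bracket-and-space stripping above reverses the format that `PlateFileWriter` produces with `Arrays.toString(...)` for each cell. The standalone round trip below is illustrative only (the sequence values are made up); it shows why that cleanup is sufficient, under the assumption that sequences themselves contain no commas or brackets.

```java
import java.util.Arrays;

// Round-trip sketch of the well-record format used by PlateFileWriter/PlateFileReader.
public class WellRecordRoundTrip {
    public static void main(String[] args) {
        String[] cell = {"a3", "b7", "-1", "d2"};        // "-1" marks a dropout sequence
        String stored = Arrays.toString(cell);            // "[a3, b7, -1, d2]" as written to the CSV
        String[] parsed = stored.replaceAll("\\[", "")
                                .replaceAll("]", "")
                                .replaceAll(" ", "")
                                .split(",");
        System.out.println(Arrays.equals(cell, parsed));  // true
    }
}
```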
@@ -10,13 +10,13 @@ import java.util.*;
|
|||||||
|
|
||||||
public class PlateFileWriter {
|
public class PlateFileWriter {
|
||||||
private int size;
|
private int size;
|
||||||
private List<List<Integer[]>> wells;
|
private List<List<String[]>> wells;
|
||||||
private double stdDev;
|
private double stdDev;
|
||||||
private double lambda;
|
private double lambda;
|
||||||
private Double error;
|
private Double error;
|
||||||
private String filename;
|
private String filename;
|
||||||
private String sourceFileName;
|
private String sourceFileName;
|
||||||
private Integer[] concentrations;
|
private Integer[] populations;
|
||||||
private boolean isExponential = false;
|
private boolean isExponential = false;
|
||||||
|
|
||||||
public PlateFileWriter(String filename, Plate plate) {
|
public PlateFileWriter(String filename, Plate plate) {
|
||||||
@@ -35,18 +35,18 @@ public class PlateFileWriter {
|
|||||||
}
|
}
|
||||||
this.error = plate.getError();
|
this.error = plate.getError();
|
||||||
this.wells = plate.getWells();
|
this.wells = plate.getWells();
|
||||||
this.concentrations = plate.getPopulations();
|
this.populations = plate.getPopulations();
|
||||||
Arrays.sort(concentrations);
|
Arrays.sort(populations);
|
||||||
}
|
}
|
||||||
|
|
||||||
public void writePlateFile(){
|
public void writePlateFile(){
|
||||||
Comparator<List<Integer[]>> listLengthDescending = Comparator.comparingInt(List::size);
|
Comparator<List<String[]>> listLengthDescending = Comparator.comparingInt(List::size);
|
||||||
wells.sort(listLengthDescending.reversed());
|
wells.sort(listLengthDescending.reversed());
|
||||||
int maxLength = wells.get(0).size();
|
int maxLength = wells.get(0).size();
|
||||||
List<List<String>> wellsAsStrings = new ArrayList<>();
|
List<List<String>> wellsAsStrings = new ArrayList<>();
|
||||||
for (List<Integer[]> w: wells){
|
for (List<String[]> w: wells){
|
||||||
List<String> tmp = new ArrayList<>();
|
List<String> tmp = new ArrayList<>();
|
||||||
for(Integer[] c: w) {
|
for(String[] c: w) {
|
||||||
tmp.add(Arrays.toString(c));
|
tmp.add(Arrays.toString(c));
|
||||||
}
|
}
|
||||||
wellsAsStrings.add(tmp);
|
wellsAsStrings.add(tmp);
|
||||||
@@ -73,14 +73,12 @@ public class PlateFileWriter {
|
|||||||
// rows.add(tmp);
|
// rows.add(tmp);
|
||||||
// }
|
// }
|
||||||
|
|
||||||
//get list of well populations
|
//make string out of populations array
|
||||||
List<Integer> wellPopulations = Arrays.asList(concentrations);
|
|
||||||
//make string out of populations list
|
|
||||||
StringBuilder populationsStringBuilder = new StringBuilder();
|
StringBuilder populationsStringBuilder = new StringBuilder();
|
||||||
populationsStringBuilder.append(wellPopulations.remove(0).toString());
|
populationsStringBuilder.append(populations[0].toString());
|
||||||
for(Integer i: wellPopulations){
|
for(int i = 1; i < populations.length; i++){
|
||||||
populationsStringBuilder.append(", ");
|
populationsStringBuilder.append(", ");
|
||||||
populationsStringBuilder.append(i.toString());
|
populationsStringBuilder.append(populations[i].toString());
|
||||||
}
|
}
|
||||||
String wellPopulationsString = populationsStringBuilder.toString();
|
String wellPopulationsString = populationsStringBuilder.toString();
|
||||||
|
|
||||||
|
|||||||
src/main/java/SequenceRecord.java (new file, 65 lines)
@@ -0,0 +1,65 @@
+/*
+Class to represent individual sequences, holding their well occupancy and read count information.
+Will make a map of these keyed to the sequences themselves.
+Ideally, I'll be able to construct both the Vertices and the weights matrix from this map.
+*/
+
+import java.io.Serializable;
+import java.util.*;
+
+public class SequenceRecord implements Serializable {
+    private final String sequence;
+    private final SequenceType type;
+    //keys are well numbers, values are read count in that well
+    private final Map<Integer, Integer> wells;
+
+    public SequenceRecord (String sequence, SequenceType type) {
+        this.sequence = sequence;
+        this.type = type;
+        this.wells = new LinkedHashMap<>();
+    }
+
+    //this shouldn't be necessary, since the sequence will be the map key, but
+    public String getSequence() {
+        return sequence;
+    }
+
+    public SequenceType getSequenceType(){
+        return type;
+    }
+
+    //use this to update the record for each new read
+    public void addRead(Integer wellNumber) {
+        wells.merge(wellNumber, 1, Integer::sum);
+    }
+
+    //don't know if I'll ever need this
+    public void addWellData(Integer wellNumber, Integer readCount) {
+        wells.put(wellNumber, readCount);
+    }
+
+    public Set<Integer> getWells() {
+        return wells.keySet();
+    }
+
+    public Map<Integer, Integer> getWellOccupancies() { return wells; }
+
+    public boolean isInWell(Integer wellNumber) {
+        return wells.containsKey(wellNumber);
+    }
+
+    public Integer getOccupancy() {
+        return wells.size();
+    }
+
+    //read count for whole plate
+    public Integer getReadCount(){
+        return wells.values().stream().mapToInt(Integer::valueOf).sum();
+    }
+
+    //read count in a specific well
+    public Integer getReadCount(Integer wellNumber) {
+        return wells.get(wellNumber);
+    }
+}
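The class comments describe the intended use: one SequenceRecord per distinct sequence, collected into a map keyed by the sequence itself while the plate is scanned. A minimal sketch of that pattern follows; the SequenceTally wrapper and recordRead helper are hypothetical and only meant to show the map-plus-addRead idiom, not the project's actual scanning code.

import java.util.LinkedHashMap;
import java.util.Map;

public class SequenceTally {
    //one record per distinct sequence, keyed by the sequence string itself
    private final Map<String, SequenceRecord> records = new LinkedHashMap<>();

    //called once per simulated read: creates the record on first sight, then bumps the per-well count
    public void recordRead(String sequence, SequenceType type, int wellNumber) {
        records.computeIfAbsent(sequence, s -> new SequenceRecord(s, type))
               .addRead(wellNumber);
    }

    public Map<String, SequenceRecord> getRecords() {
        return records;
    }
}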
src/main/java/SequenceType.java (new file, 8 lines)
@@ -0,0 +1,8 @@
+//enum for tagging types of sequences
+//Listed in order that they appear in a cell array, so ordinal() method will return correct index
+public enum SequenceType {
+    CDR3_ALPHA,
+    CDR3_BETA,
+    CDR1_ALPHA,
+    CDR1_BETA
+}
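Because the enum constants follow the cell-array layout, ordinal() can stand in for the old hard-coded index constants. A small illustration; the sequence labels are invented:

String[] cell = {"a17", "b42", "c3", "d9"};                   //{CDR3 alpha, CDR3 beta, CDR1 alpha, CDR1 beta}
String cdr3Alpha = cell[SequenceType.CDR3_ALPHA.ordinal()];   //index 0 -> "a17"
String cdr1Beta  = cell[SequenceType.CDR1_BETA.ordinal()];    //index 3 -> "d9"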
@@ -1,9 +1,9 @@
-import org.jgrapht.Graph;
import org.jgrapht.alg.interfaces.MatchingAlgorithm;
import org.jgrapht.alg.matching.MaximumWeightBipartiteMatching;
import org.jgrapht.generate.SimpleWeightedBipartiteGraphMatrixGenerator;
import org.jgrapht.graph.DefaultWeightedEdge;
import org.jgrapht.graph.SimpleWeightedGraph;
+import org.jheaps.tree.FibonacciHeap;
import org.jheaps.tree.PairingHeap;

import java.math.BigDecimal;
@@ -12,130 +12,128 @@ import java.text.NumberFormat;
import java.time.Instant;
import java.time.Duration;
import java.util.*;
-import java.util.stream.IntStream;
+/*
+Refactor notes
+What would be necessary to do everything with only one scan through the sample plate?
+I would need to keep a list of sequences (real and spurious), and metadata about each sequence.
+I would need the data:
+* # of each well the sequence appears in
+* Read count in that well
+*/

//NOTE: "sequence" in method and variable names refers to a peptide sequence from a simulated T cell
-public class Simulator {
+public class Simulator implements GraphModificationFunctions {
-private static final int cdr3AlphaIndex = 0;
-private static final int cdr3BetaIndex = 1;
-private static final int cdr1AlphaIndex = 2;
-private static final int cdr1BetaIndex = 3;

-public static CellSample generateCellSample(Integer numDistinctCells, Integer cdr1Freq) {
-//In real T cells, CDR1s have about one third the diversity of CDR3s
-List<Integer> numbersCDR3 = new ArrayList<>();
-List<Integer> numbersCDR1 = new ArrayList<>();
-Integer numDistCDR3s = 2 * numDistinctCells + 1;
-IntStream.range(1, numDistCDR3s + 1).forEach(i -> numbersCDR3.add(i));
-IntStream.range(numDistCDR3s + 1, numDistCDR3s + 1 + (numDistCDR3s / cdr1Freq) + 1).forEach(i -> numbersCDR1.add(i));
-Collections.shuffle(numbersCDR3);
-Collections.shuffle(numbersCDR1);

-//Each cell represented by 4 values
-//two CDR3s, and two CDR1s. First two values are CDR3s (alpha, beta), second two are CDR1s (alpha, beta)
-List<Integer[]> distinctCells = new ArrayList<>();
-for(int i = 0; i < numbersCDR3.size() - 1; i = i + 2){
-Integer tmpCDR3a = numbersCDR3.get(i);
-Integer tmpCDR3b = numbersCDR3.get(i+1);
-Integer tmpCDR1a = numbersCDR1.get(i % numbersCDR1.size());
-Integer tmpCDR1b = numbersCDR1.get((i+1) % numbersCDR1.size());
-Integer[] tmp = {tmpCDR3a, tmpCDR3b, tmpCDR1a, tmpCDR1b};
-distinctCells.add(tmp);
-}
-return new CellSample(distinctCells, cdr1Freq);
-}

-//Make the graph needed for matching CDR3s
-public static GraphWithMapData makeGraph(List<Integer[]> distinctCells, Plate samplePlate, boolean verbose) {
+public static GraphWithMapData makeCDR3Graph(CellSample cellSample, Plate samplePlate, int readDepth,
+double readErrorRate, double errorCollisionRate, boolean verbose) {
+//start timing
Instant start = Instant.now();
-int[] alphaIndex = {cdr3AlphaIndex};
+int[] alphaIndices = {SequenceType.CDR3_ALPHA.ordinal()};
-int[] betaIndex = {cdr3BetaIndex};
+int[] betaIndices = {SequenceType.CDR3_BETA.ordinal()};
+List<String[]> distinctCells = cellSample.getCells();
int numWells = samplePlate.getSize();

+//Make a hashmap keyed to alphas, values are associated betas.
if(verbose){System.out.println("Making cell maps");}
-//HashMap keyed to Alphas, values Betas
-Map<Integer, Integer> distCellsMapAlphaKey = makeSequenceToSequenceMap(distinctCells, 0, 1);
+Map<String, String> distCellsMapAlphaKey = makeSequenceToSequenceMap(distinctCells,
+SequenceType.CDR3_ALPHA.ordinal(), SequenceType.CDR3_BETA.ordinal());
if(verbose){System.out.println("Cell maps made");}

-if(verbose){System.out.println("Making well maps");}
+//Make linkedHashMap keyed to sequences, values are SequenceRecords reflecting plate statistics
-Map<Integer, Integer> allAlphas = samplePlate.assayWellsSequenceS(alphaIndex);
+if(verbose){System.out.println("Making sample plate sequence maps");}
-Map<Integer, Integer> allBetas = samplePlate.assayWellsSequenceS(betaIndex);
+Map<String, SequenceRecord> alphaSequences = samplePlate.countSequences(readDepth, readErrorRate,
-int alphaCount = allAlphas.size();
+errorCollisionRate, alphaIndices);
-if(verbose){System.out.println("All alphas count: " + alphaCount);}
+int alphaCount = alphaSequences.size();
-int betaCount = allBetas.size();
+if(verbose){System.out.println("Alphas sequences read: " + alphaCount);}
-if(verbose){System.out.println("All betas count: " + betaCount);}
+Map<String, SequenceRecord> betaSequences = samplePlate.countSequences(readDepth, readErrorRate,
-if(verbose){System.out.println("Well maps made");}
+errorCollisionRate, betaIndices);
+int betaCount = betaSequences.size();
+if(verbose){System.out.println("Betas sequences read: " + betaCount);}
+if(verbose){System.out.println("Sample plate sequence maps made");}

+//pre-filter saturating sequences and sequences likely to be misreads
if(verbose){System.out.println("Removing sequences present in all wells.");}
-filterByOccupancyThresholds(allAlphas, 1, numWells - 1);
+filterByOccupancyThresholds(alphaSequences, 1, numWells - 1);
-filterByOccupancyThresholds(allBetas, 1, numWells - 1);
+filterByOccupancyThresholds(betaSequences, 1, numWells - 1);
if(verbose){System.out.println("Sequences removed");}
-int pairableAlphaCount = allAlphas.size();
+if(verbose){System.out.println("Remaining alpha sequence count: " + alphaSequences.size());}
-if(verbose){System.out.println("Remaining alphas count: " + pairableAlphaCount);}
+if(verbose){System.out.println("Remaining beta sequence count: " + betaSequences.size());}
-int pairableBetaCount = allBetas.size();
+if (readDepth > 1) {
-if(verbose){System.out.println("Remaining betas count: " + pairableBetaCount);}
+if(verbose){System.out.println("Removing sequences with disparate occupancies and read counts");}
+filterByOccupancyAndReadCount(alphaSequences, readDepth);
+filterByOccupancyAndReadCount(betaSequences, readDepth);
+if(verbose){System.out.println("Sequences removed");}
+if(verbose){System.out.println("Remaining alpha sequence count: " + alphaSequences.size());}
+if(verbose){System.out.println("Remaining beta sequence count: " + betaSequences.size());}
+}
+int pairableAlphaCount = alphaSequences.size();
+if(verbose){System.out.println("Remaining alpha sequence count: " + pairableAlphaCount);}
+int pairableBetaCount = betaSequences.size();
+if(verbose){System.out.println("Remaining beta sequence count: " + pairableBetaCount);}

+//construct the graph. For simplicity, going to make
if(verbose){System.out.println("Making vertex maps");}
//For the SimpleWeightedBipartiteGraphMatrixGenerator, all vertices must have
//distinct numbers associated with them. Since I'm using a 2D array, that means
//distinct indices between the rows and columns. vertexStartValue lets me track where I switch
//from numbering rows to columns, so I can assign unique numbers to every vertex, and then
//subtract the vertexStartValue from betas to use their vertex labels as array indices
-Integer vertexStartValue = 0;
+int vertexStartValue = 0;
//keys are sequential integer vertices, values are alphas
-Map<Integer, Integer> plateVtoAMap = makeVertexToSequenceMap(allAlphas, vertexStartValue);
+Map<String, Integer> plateAtoVMap = makeSequenceToVertexMap(alphaSequences, vertexStartValue);
//new start value for vertex to beta map should be one more than final vertex value in alpha map
-vertexStartValue += plateVtoAMap.size();
+vertexStartValue += plateAtoVMap.size();
-//keys are sequential integers vertices, values are betas
+//keys are betas, values are sequential integers
-Map<Integer, Integer> plateVtoBMap = makeVertexToSequenceMap(allBetas, vertexStartValue);
+Map<String, Integer> plateBtoVMap = makeSequenceToVertexMap(betaSequences, vertexStartValue);
-//keys are alphas, values are sequential integer vertices from previous map
-Map<Integer, Integer> plateAtoVMap = invertVertexMap(plateVtoAMap);
-//keys are betas, values are sequential integer vertices from previous map
-Map<Integer, Integer> plateBtoVMap = invertVertexMap(plateVtoBMap);
if(verbose){System.out.println("Vertex maps made");}

//make adjacency matrix for bipartite graph generator
//(technically this is only 1/4 of an adjacency matrix, but that's all you need
//for a bipartite graph, and all the SimpleWeightedBipartiteGraphMatrixGenerator class expects.)
-if(verbose){System.out.println("Creating adjacency matrix");}
+if(verbose){System.out.println("Making adjacency matrix");}
-//Count how many wells each alpha appears in
+double[][] weights = new double[plateAtoVMap.size()][plateBtoVMap.size()];
-Map<Integer, Integer> alphaWellCounts = new HashMap<>();
+fillAdjacencyMatrix(weights, vertexStartValue, alphaSequences, betaSequences, plateAtoVMap, plateBtoVMap);
-//count how many wells each beta appears in
+if(verbose){System.out.println("Adjacency matrix made");}
-Map<Integer, Integer> betaWellCounts = new HashMap<>();
+//make bipartite graph
-//the adjacency matrix to be used by the graph generator
+if(verbose){System.out.println("Making bipartite weighted graph");}
-double[][] weights = new double[plateVtoAMap.size()][plateVtoBMap.size()];
-countSequencesAndFillMatrix(samplePlate, allAlphas, allBetas, plateAtoVMap,
-plateBtoVMap, alphaIndex, betaIndex, alphaWellCounts, betaWellCounts, weights);
-if(verbose){System.out.println("Matrix created");}

-//create bipartite graph
-if(verbose){System.out.println("Creating graph");}
//the graph object
-SimpleWeightedGraph<Integer, DefaultWeightedEdge> graph =
+SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph =
new SimpleWeightedGraph<>(DefaultWeightedEdge.class);
//the graph generator
SimpleWeightedBipartiteGraphMatrixGenerator graphGenerator = new SimpleWeightedBipartiteGraphMatrixGenerator();
//the list of alpha vertices
-List<Integer> alphaVertices = new ArrayList<>(plateVtoAMap.keySet()); //This will work because LinkedHashMap preserves order of entry
+List<Vertex> alphaVertices = new ArrayList<>();
+for (String seq : plateAtoVMap.keySet()) {
+Vertex alphaVertex = new Vertex(alphaSequences.get(seq), plateAtoVMap.get(seq));
+alphaVertices.add(alphaVertex);
+}
+//Sort to make sure the order of vertices in list matches the order of the adjacency matrix
+Collections.sort(alphaVertices);
+//Add ordered list of vertices to the graph
graphGenerator.first(alphaVertices);
//the list of beta vertices
-List<Integer> betaVertices = new ArrayList<>(plateVtoBMap.keySet());
+List<Vertex> betaVertices = new ArrayList<>();
-graphGenerator.second(betaVertices); //This will work because LinkedHashMap preserves order of entry
+for (String seq : plateBtoVMap.keySet()) {
+Vertex betaVertex = new Vertex(betaSequences.get(seq), plateBtoVMap.get(seq));
+betaVertices.add(betaVertex);
+}
+//Sort to make sure the order of vertices in list matches the order of the adjacency matrix
+Collections.sort(betaVertices);
+//Add ordered list of vertices to the graph
+graphGenerator.second(betaVertices);
//use adjacency matrix of weight created previously
graphGenerator.weights(weights);
graphGenerator.generateGraph(graph);
if(verbose){System.out.println("Graph created");}
+//stop timing
Instant stop = Instant.now();
Duration time = Duration.between(start, stop);

//create GraphWithMapData object
-GraphWithMapData output = new GraphWithMapData(graph, numWells, samplePlate.getPopulations(), alphaCount, betaCount,
+GraphWithMapData output = new GraphWithMapData(graph, numWells, samplePlate.getPopulations(), distCellsMapAlphaKey,
-distCellsMapAlphaKey, plateVtoAMap, plateVtoBMap, plateAtoVMap,
+alphaCount, betaCount, readDepth, readErrorRate, errorCollisionRate, time);
-plateBtoVMap, alphaWellCounts, betaWellCounts, time);
//Set source file name in graph to name of sample plate
-output.setSourceFilename(samplePlate.getSourceFileName());
+output.setSourceFilename(samplePlate.getFilename());
//return GraphWithMapData object
return output;
}
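The comments in makeCDR3Graph describe a single numbering scheme across both partitions: alphas take the first block of vertex labels, betas continue from vertexStartValue, and a beta's column in the weights matrix is its label minus that offset. A standalone sketch with invented counts, not taken from the project:

// Illustration of the vertex-numbering scheme described above (all numbers made up)
int alphaCountExample = 3;                               // alphas get vertex labels 0, 1, 2
int vertexStartValueExample = alphaCountExample;         // betas start at 3: labels 3, 4, 5, ...
int betaVertexLabel = 4;                                 // some beta vertex
double[][] w = new double[3][5];                         // rows = alphas, columns = betas
int column = betaVertexLabel - vertexStartValueExample;  // 4 - 3 = 1: that beta's column index
w[2][column] += 1.0;                                     // alpha 2 and beta 4 share one more well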
@@ -145,47 +143,70 @@ public class Simulator {
Integer highThreshold, Integer maxOccupancyDifference,
Integer minOverlapPercent, boolean verbose) {
Instant start = Instant.now();
-//Integer arrays will contain TO VERTEX, FROM VERTEX, and WEIGHT (which I'll need to cast to double)
+SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph = data.getGraph();
-List<Integer[]> removedEdges = new ArrayList<>();
+Map<Vertex[], Integer> removedEdges = new HashMap<>();
+boolean saveEdges = BiGpairSEQ.cacheGraph();
int numWells = data.getNumWells();
-Integer alphaCount = data.getAlphaCount();
+//Integer alphaCount = data.getAlphaCount();
-Integer betaCount = data.getBetaCount();
+//Integer betaCount = data.getBetaCount();
-Map<Integer, Integer> distCellsMapAlphaKey = data.getDistCellsMapAlphaKey();
+Map<String, String> distCellsMapAlphaKey = data.getDistCellsMapAlphaKey();
-Map<Integer, Integer> plateVtoAMap = data.getPlateVtoAMap();
+Set<Vertex> alphas = new HashSet<>();
-Map<Integer, Integer> plateVtoBMap = data.getPlateVtoBMap();
+Set<Vertex> betas = new HashSet<>();
-Map<Integer, Integer> alphaWellCounts = data.getAlphaWellCounts();
+for(Vertex v: graph.vertexSet()) {
-Map<Integer, Integer> betaWellCounts = data.getBetaWellCounts();
+if (SequenceType.CDR3_ALPHA.equals(v.getType())){
-SimpleWeightedGraph<Integer, DefaultWeightedEdge> graph = data.getGraph();
+alphas.add(v);
+}
+else {
+betas.add(v);
+}
+}
+Integer graphAlphaCount = alphas.size();
+Integer graphBetaCount = betas.size();

//remove edges with weights outside given overlap thresholds, add those to removed edge list
if(verbose){System.out.println("Eliminating edges with weights outside overlap threshold values");}
-removedEdges.addAll(GraphModificationFunctions.filterByOverlapThresholds(graph, lowThreshold, highThreshold));
+removedEdges.putAll(GraphModificationFunctions.filterByOverlapThresholds(graph, lowThreshold, highThreshold, saveEdges));
if(verbose){System.out.println("Over- and under-weight edges removed");}

//remove edges between vertices with too small an overlap size, add those to removed edge list
if(verbose){System.out.println("Eliminating edges with weights less than " + minOverlapPercent.toString() +
" percent of vertex occupancy value.");}
-removedEdges.addAll(GraphModificationFunctions.filterByOverlapPercent(graph, alphaWellCounts, betaWellCounts,
+removedEdges.putAll(GraphModificationFunctions.filterByOverlapPercent(graph, minOverlapPercent, saveEdges));
-plateVtoAMap, plateVtoBMap, minOverlapPercent));
if(verbose){System.out.println("Edges with weights too far below a vertex occupancy value removed");}

//Filter by relative occupancy
if(verbose){System.out.println("Eliminating edges between vertices with occupancy difference > "
+ maxOccupancyDifference);}
-removedEdges.addAll(GraphModificationFunctions.filterByRelativeOccupancy(graph, alphaWellCounts, betaWellCounts,
+removedEdges.putAll(GraphModificationFunctions.filterByRelativeOccupancy(graph, maxOccupancyDifference, saveEdges));
-plateVtoAMap, plateVtoBMap, maxOccupancyDifference));
if(verbose){System.out.println("Edges between vertices of with excessively different occupancy values " +
"removed");}

-//Find Maximum Weighted Matching
+//Find Maximum Weight Matching
//using jheaps library class PairingHeap for improved efficiency
-if(verbose){System.out.println("Finding maximum weighted matching");}
+if(verbose){System.out.println("Finding maximum weight matching");}
-//Attempting to use addressable heap to improve performance
+MaximumWeightBipartiteMatching maxWeightMatching;
-MaximumWeightBipartiteMatching maxWeightMatching =
+//Use correct heap type for priority queue
-new MaximumWeightBipartiteMatching(graph,
+String heapType = BiGpairSEQ.getPriorityQueueHeapType();
-plateVtoAMap.keySet(),
+switch (heapType) {
-plateVtoBMap.keySet(),
+case "PAIRING" -> {
+maxWeightMatching = new MaximumWeightBipartiteMatching(graph,
+alphas,
+betas,
i -> new PairingHeap(Comparator.naturalOrder()));
+}
+case "FIBONACCI" -> {
+maxWeightMatching = new MaximumWeightBipartiteMatching(graph,
+alphas,
+betas,
+i -> new FibonacciHeap(Comparator.naturalOrder()));
+}
+default -> {
+maxWeightMatching = new MaximumWeightBipartiteMatching(graph,
+alphas,
+betas);
+}
+}
+//get the matching
MatchingAlgorithm.Matching<String, DefaultWeightedEdge> graphMatching = maxWeightMatching.getMatching();
if(verbose){System.out.println("Matching completed");}
Instant stop = Instant.now();
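The removedEdges map exists so that, when the graph is cached for re-use, every edge discarded by the pre-pairing filters can be put back after the matching runs. The filters themselves live in GraphModificationFunctions and are not shown here; the following is only a sketch of the underlying JGraphT remove-and-restore pattern, with alphaVertex and betaVertex standing in for two real vertices.

// Remember an edge as {source, target} -> weight, drop it, and restore it later.
Map<Vertex[], Integer> removed = new HashMap<>();

DefaultWeightedEdge edge = graph.getEdge(alphaVertex, betaVertex);
removed.put(new Vertex[]{alphaVertex, betaVertex}, (int) graph.getEdgeWeight(edge));
graph.removeEdge(edge);

// ... run the matching on the filtered graph ...

for (Map.Entry<Vertex[], Integer> entry : removed.entrySet()) {
    DefaultWeightedEdge restored = graph.addEdge(entry.getKey()[0], entry.getKey()[1]);
    graph.setEdgeWeight(restored, entry.getValue());
}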
@@ -209,14 +230,14 @@ public class Simulator {
int trueCount = 0;
int falseCount = 0;
boolean check;
-Map<Integer, Integer> matchMap = new HashMap<>();
+Map<String, String> matchMap = new HashMap<>();
while(weightIter.hasNext()) {
e = weightIter.next();
-Integer source = graph.getEdgeSource(e);
+Vertex source = graph.getEdgeSource(e);
-Integer target = graph.getEdgeTarget(e);
+Vertex target = graph.getEdgeTarget(e);
//The match map is all matches found, not just true matches!
-matchMap.put(plateVtoAMap.get(source), plateVtoBMap.get(target));
+matchMap.put(source.getSequence(), target.getSequence());
-check = plateVtoBMap.get(target).equals(distCellsMapAlphaKey.get(plateVtoAMap.get(source)));
+check = target.getSequence().equals(distCellsMapAlphaKey.get(source.getSequence()));
if(check) {
trueCount++;
}
@@ -224,33 +245,44 @@ public class Simulator {
falseCount++;
}
List<String> result = new ArrayList<>();
-result.add(plateVtoAMap.get(source).toString());
+//alpha sequence
+result.add(source.getSequence());
//alpha well count
-result.add(alphaWellCounts.get(plateVtoAMap.get(source)).toString());
+result.add(source.getOccupancy().toString());
-result.add(plateVtoBMap.get(target).toString());
+//beta sequence
+result.add(target.getSequence());
//beta well count
-result.add(betaWellCounts.get(plateVtoBMap.get(target)).toString());
+result.add(target.getOccupancy().toString());
//overlap count
result.add(Double.toString(graph.getEdgeWeight(e)));
result.add(Boolean.toString(check));
-double pValue = Equations.pValue(numWells, alphaWellCounts.get(plateVtoAMap.get(source)),
+double pValue = Equations.pValue(numWells, source.getOccupancy(),
-betaWellCounts.get(plateVtoBMap.get(target)), graph.getEdgeWeight(e));
+target.getOccupancy(), graph.getEdgeWeight(e));
BigDecimal pValueTrunc = new BigDecimal(pValue, mc);
result.add(pValueTrunc.toString());
allResults.add(result);
}

//Metadata comments for CSV file
-int min = Math.min(alphaCount, betaCount);
+String algoType = "LEDA book with heap: " + heapType;
+int min = Math.min(graphAlphaCount, graphBetaCount);
+//matching weight
+BigDecimal totalMatchingWeight = maxWeightMatching.getMatchingWeight();
//rate of attempted matching
double attemptRate = (double) (trueCount + falseCount) / min;
BigDecimal attemptRateTrunc = new BigDecimal(attemptRate, mc);
//rate of pairing error
double pairingErrorRate = (double) falseCount / (trueCount + falseCount);
-BigDecimal pairingErrorRateTrunc = new BigDecimal(pairingErrorRate, mc);
+BigDecimal pairingErrorRateTrunc;
-//get list of well concentrations
+if(Double.isFinite(pairingErrorRate)) {
-Integer[] wellPopulations = data.getWellConcentrations();
+pairingErrorRateTrunc = new BigDecimal(pairingErrorRate, mc);
-//make string out of concentrations list
+}
+else{
+pairingErrorRateTrunc = new BigDecimal(-1, mc);
+}
+//get list of well populations
+Integer[] wellPopulations = data.getWellPopulations();
+//make string out of populations list
StringBuilder populationsStringBuilder = new StringBuilder();
populationsStringBuilder.append(wellPopulations[0].toString());
for(int i = 1; i < wellPopulations.length; i++){
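The Double.isFinite guard added above matters because when no pairings are attempted, trueCount + falseCount is zero and the division yields NaN, which BigDecimal's double constructor rejects with a NumberFormatException; the new code records -1 instead. A two-line illustration:

double rate = 0.0 / 0.0;                               // NaN when trueCount + falseCount == 0
// new BigDecimal(rate, mc) would throw NumberFormatException here
BigDecimal safeRate = Double.isFinite(rate) ? new BigDecimal(rate, mc) : new BigDecimal(-1, mc);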
@@ -258,37 +290,55 @@ public class Simulator {
populationsStringBuilder.append(wellPopulations[i].toString());
}
String wellPopulationsString = populationsStringBuilder.toString();
+//graph generation time
+Duration graphTime = data.getTime();
+//MWM run time
+Duration pairingTime = Duration.between(start, stop);
//total simulation time
-Duration time = Duration.between(start, stop);
+Duration totalTime = graphTime.plus(pairingTime);
-time = time.plus(data.getTime());

Map<String, String> metadata = new LinkedHashMap<>();
metadata.put("sample plate filename", data.getSourceFilename());
metadata.put("graph filename", dataFilename);
+metadata.put("MWM algorithm type", algoType);
+metadata.put("matching weight", totalMatchingWeight.toString());
metadata.put("well populations", wellPopulationsString);
-metadata.put("total alphas found", alphaCount.toString());
+metadata.put("sequence read depth", data.getReadDepth().toString());
-metadata.put("total betas found", betaCount.toString());
+metadata.put("sequence read error rate", data.getReadErrorRate().toString());
-metadata.put("high overlap threshold", highThreshold.toString());
+metadata.put("read error collision rate", data.getErrorCollisionRate().toString());
-metadata.put("low overlap threshold", lowThreshold.toString());
+metadata.put("total alphas read from plate", data.getAlphaCount().toString());
-metadata.put("maximum occupancy difference", maxOccupancyDifference.toString());
+metadata.put("total betas read from plate", data.getBetaCount().toString());
-metadata.put("minimum overlap percent", minOverlapPercent.toString());
+//HARD CODED, PARAMETERIZE LATER
+metadata.put("pre-filter sequences present in all wells", "true");
+//HARD CODED, PARAMETERIZE LATER
+metadata.put("pre-filter sequences based on occupancy/read count discrepancy", "true");
+metadata.put("alphas in graph (after pre-filtering)", graphAlphaCount.toString());
+metadata.put("betas in graph (after pre-filtering)", graphBetaCount.toString());
+metadata.put("high overlap threshold for pairing", highThreshold.toString());
+metadata.put("low overlap threshold for pairing", lowThreshold.toString());
+metadata.put("minimum overlap percent for pairing", minOverlapPercent.toString());
+metadata.put("maximum occupancy difference for pairing", maxOccupancyDifference.toString());
metadata.put("pairing attempt rate", attemptRateTrunc.toString());
metadata.put("correct pairing count", Integer.toString(trueCount));
metadata.put("incorrect pairing count", Integer.toString(falseCount));
metadata.put("pairing error rate", pairingErrorRateTrunc.toString());
-metadata.put("simulation time", nf.format(time.toSeconds()));
+metadata.put("time to generate graph (seconds)", nf.format(graphTime.toSeconds()));
+metadata.put("time to pair sequences (seconds)",nf.format(pairingTime.toSeconds()));
+metadata.put("total simulation time (seconds)", nf.format(totalTime.toSeconds()));
//create MatchingResult object
-MatchingResult output = new MatchingResult(metadata, header, allResults, matchMap, time);
+MatchingResult output = new MatchingResult(metadata, header, allResults, matchMap);
if(verbose){
for(String s: output.getComments()){
System.out.println(s);
}
}

-//put the removed edges back on the graph
+if(saveEdges) {
-System.out.println("Restoring removed edges to graph.");
+//put the removed edges back on the graph
-GraphModificationFunctions.addRemovedEdges(graph, removedEdges);
+System.out.println("Restoring removed edges to graph.");
+GraphModificationFunctions.addRemovedEdges(graph, removedEdges);
+}
//return MatchingResult object
return output;
}
@@ -599,81 +649,77 @@ public class Simulator {
// }

//Remove sequences based on occupancy
-public static void filterByOccupancyThresholds(Map<Integer, Integer> wellMap, int low, int high){
+public static void filterByOccupancyThresholds(Map<String, SequenceRecord> wellMap, int low, int high){
-List<Integer> noise = new ArrayList<>();
+List<String> noise = new ArrayList<>();
-for(Integer k: wellMap.keySet()){
+for(String k: wellMap.keySet()){
-if((wellMap.get(k) > high) || (wellMap.get(k) < low)){
+if((wellMap.get(k).getOccupancy() > high) || (wellMap.get(k).getOccupancy() < low)){
noise.add(k);
}
}
-for(Integer k: noise) {
+for(String k: noise) {
wellMap.remove(k);
}
}

-//Counts the well occupancy of the row peptides and column peptides into given maps, and
+public static void filterByOccupancyAndReadCount(Map<String, SequenceRecord> sequences, int readDepth) {
-//fills weights in the given 2D array
+List<String> noise = new ArrayList<>();
-private static void countSequencesAndFillMatrix(Plate samplePlate,
+for(String k : sequences.keySet()){
-Map<Integer,Integer> allRowSequences,
+//occupancy times read depth should be more than half the sequence read count if the read error rate is low
-Map<Integer,Integer> allColumnSequences,
+Integer threshold = (sequences.get(k).getOccupancy() * readDepth) / 2;
-Map<Integer,Integer> rowSequenceToVertexMap,
+if(sequences.get(k).getReadCount() < threshold) {
-Map<Integer,Integer> columnSequenceToVertexMap,
+noise.add(k);
-int[] rowSequenceIndices,
-int[] colSequenceIndices,
-Map<Integer, Integer> rowSequenceCounts,
-Map<Integer,Integer> columnSequenceCounts,
-double[][] weights){
-Map<Integer, Integer> wellNRowSequences = null;
-Map<Integer, Integer> wellNColumnSequences = null;
-int vertexStartValue = rowSequenceToVertexMap.size();
-int numWells = samplePlate.getSize();
-for (int n = 0; n < numWells; n++) {
-wellNRowSequences = samplePlate.assayWellsSequenceS(n, rowSequenceIndices);
-for (Integer a : wellNRowSequences.keySet()) {
-if(allRowSequences.containsKey(a)){
-rowSequenceCounts.merge(a, 1, (oldValue, newValue) -> oldValue + newValue);
-}
}
-wellNColumnSequences = samplePlate.assayWellsSequenceS(n, colSequenceIndices);
+}
-for (Integer b : wellNColumnSequences.keySet()) {
+for(String k : noise) {
-if(allColumnSequences.containsKey(b)){
+sequences.remove(k);
-columnSequenceCounts.merge(b, 1, (oldValue, newValue) -> oldValue + newValue);
-}
-}
-for (Integer i : wellNRowSequences.keySet()) {
-if(allRowSequences.containsKey(i)){
-for (Integer j : wellNColumnSequences.keySet()) {
-if(allColumnSequences.containsKey(j)){
-weights[rowSequenceToVertexMap.get(i)][columnSequenceToVertexMap.get(j) - vertexStartValue] += 1.0;
-}
-}
-}
-}

}
}

|
private static Map<String, String> makeSequenceToSequenceMap(List<String[]> cells, int keySequenceIndex,
|
||||||
int valueSequenceIndex){
|
int valueSequenceIndex){
|
||||||
Map<Integer, Integer> keySequenceToValueSequenceMap = new HashMap<>();
|
Map<String, String> keySequenceToValueSequenceMap = new HashMap<>();
|
||||||
for (Integer[] cell : cells) {
|
for (String[] cell : cells) {
|
||||||
keySequenceToValueSequenceMap.put(cell[keySequenceIndex], cell[valueSequenceIndex]);
|
keySequenceToValueSequenceMap.put(cell[keySequenceIndex], cell[valueSequenceIndex]);
|
||||||
}
|
}
|
||||||
return keySequenceToValueSequenceMap;
|
return keySequenceToValueSequenceMap;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Map<Integer, Integer> makeVertexToSequenceMap(Map<Integer, Integer> sequences, Integer startValue) {
|
private static Map<Integer, String> makeVertexToSequenceMap(Map<String, SequenceRecord> sequences, Integer startValue) {
|
||||||
Map<Integer, Integer> map = new LinkedHashMap<>(); //LinkedHashMap to preserve order of entry
|
Map<Integer, String> map = new LinkedHashMap<>(); //LinkedHashMap to preserve order of entry
|
||||||
Integer index = startValue;
|
Integer index = startValue;
|
||||||
for (Integer k: sequences.keySet()) {
|
for (String k: sequences.keySet()) {
|
||||||
map.put(index, k);
|
map.put(index, k);
|
||||||
index++;
|
index++;
|
||||||
}
|
}
|
||||||
return map;
|
return map;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Map<Integer, Integer> invertVertexMap(Map<Integer, Integer> map) {
|
private static Map<String, Integer> makeSequenceToVertexMap(Map<String, SequenceRecord> sequences, Integer startValue) {
|
||||||
Map<Integer, Integer> inverse = new HashMap<>();
|
Map<String, Integer> map = new LinkedHashMap<>(); //LinkedHashMap to preserve order of entry
|
||||||
|
Integer index = startValue;
|
||||||
|
for (String k: sequences.keySet()) {
|
||||||
|
map.put(k, index);
|
||||||
|
index++;
|
||||||
|
}
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static void fillAdjacencyMatrix(double[][] weights, Integer vertexOffsetValue, Map<String, SequenceRecord> rowSequences,
|
||||||
|
Map<String, SequenceRecord> columnSequences, Map<String, Integer> rowToVertexMap,
|
||||||
|
Map<String, Integer> columnToVertexMap) {
|
||||||
|
for (String rowSeq: rowSequences.keySet()) {
|
||||||
|
for (Integer well: rowSequences.get(rowSeq).getWells()) {
|
||||||
|
for (String colSeq: columnSequences.keySet()) {
|
||||||
|
if (columnSequences.get(colSeq).isInWell(well)) {
|
||||||
|
weights[rowToVertexMap.get(rowSeq)][columnToVertexMap.get(colSeq) - vertexOffsetValue] += 1.0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Map<String, Integer> invertVertexMap(Map<Integer, String> map) {
|
||||||
|
Map<String, Integer> inverse = new HashMap<>();
|
||||||
for (Integer k : map.keySet()) {
|
for (Integer k : map.keySet()) {
|
||||||
inverse.put(map.get(k), k);
|
inverse.put(map.get(k), k);
|
||||||
}
|
}
|
||||||
|
|||||||
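fillAdjacencyMatrix makes the edge weight between an alpha and a beta equal to the number of wells both were read in. A tiny standalone illustration with invented well sets (requires java.util.Set):

// alpha "a1" read in wells {1, 2, 5}; beta "b1" read in wells {2, 5, 7}
Set<Integer> alphaWells = Set.of(1, 2, 5);
Set<Integer> betaWells = Set.of(2, 5, 7);
long sharedWells = alphaWells.stream().filter(betaWells::contains).count();  // 2
// so weights[row("a1")][column("b1")] ends up at 2.0 after the nested loops above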
src/main/java/Vertex.java
@@ -1,17 +1,75 @@
-public class Vertex {
+import org.jheaps.AddressableHeap;
-private final Integer peptide;
-private final Integer occupancy;

-public Vertex(Integer peptide, Integer occupancy) {
+import java.io.Serializable;
-this.peptide = peptide;
+import java.util.Map;
-this.occupancy = occupancy;
+
+public class Vertex implements Serializable, Comparable<Vertex> {
+private SequenceRecord record;
+private Integer vertexLabel;
+private Double potential;
+private AddressableHeap queue;
+
+public Vertex(SequenceRecord record, Integer vertexLabel) {
+this.record = record;
+this.vertexLabel = vertexLabel;
}

-public Integer getPeptide() {
+public SequenceRecord getRecord() { return record; }
-return peptide;
+public SequenceType getType() { return record.getSequenceType(); }
+
+public Integer getVertexLabel() {
+return vertexLabel;
+}
+
+public String getSequence() {
+return record.getSequence();
}

public Integer getOccupancy() {
-return occupancy;
+return record.getOccupancy();
+}
+
+public Integer getReadCount() { return record.getReadCount(); }
+
+public Map<Integer, Integer> getWellOccupancies() { return record.getWellOccupancies(); }
+
+@Override //adapted from JGraphT example code
+public int hashCode()
+{
+return (this.getSequence() == null) ? 0 : this.getSequence().hashCode();
+}
+
+@Override //adapted from JGraphT example code
+public boolean equals(Object obj)
+{
+if (this == obj)
+return true;
+if (obj == null)
+return false;
+if (getClass() != obj.getClass())
+return false;
+Vertex other = (Vertex) obj;
+if (this.getSequence() == null) {
+return other.getSequence() == null;
+} else {
+return this.getSequence().equals(other.getSequence());
+}
+}
+
+@Override //adapted from JGraphT example code
+public String toString()
+{
+StringBuilder sb = new StringBuilder();
+sb.append("(").append(vertexLabel)
+.append(", Type: ").append(this.getType().name())
+.append(", Sequence: ").append(this.getSequence())
+.append(", Occupancy: ").append(this.getOccupancy()).append(")");
+return sb.toString();
+}
+
+@Override
+public int compareTo(Vertex other) {
+return this.vertexLabel - other.getVertexLabel();
}
}
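Two properties of the rewritten class are worth noting: equality and hashing are delegated to the underlying sequence, so two Vertex objects wrapping the same sequence are interchangeable as graph vertices, while compareTo orders by the numeric label so that sorted vertex lists line up with the rows and columns of the adjacency matrix. A small illustration; the record construction here is hypothetical:

SequenceRecord rec = new SequenceRecord("a17", SequenceType.CDR3_ALPHA);
Vertex v1 = new Vertex(rec, 0);
Vertex v2 = new Vertex(rec, 7);
boolean sameVertex = v1.equals(v2) && v1.hashCode() == v2.hashCode();  // true: same sequence
int order = v1.compareTo(v2);                                          // negative: ordered by vertex label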