Skip to content

Commit

Permalink
Merge pull request #375 from vgteam/reproduce-pan-problem
Browse files Browse the repository at this point in the history
Add example needed to reproduce panning issue (without crashing)
  • Loading branch information
adamnovak authored Dec 1, 2023
2 parents 26266cc + a0daaf6 commit 1082ccd
Show file tree
Hide file tree
Showing 15 changed files with 239 additions and 39 deletions.
3 changes: 2 additions & 1 deletion exampleData/cactus.bed
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
ref 10 100 this is a test region
ref 500 600 region without read tracks chunk-cactus-no-reads
ref 1000 2000 another region with a very long description to see how it will be displayed
ref 2000 3000 pre-fetched region chunk-ref-2000-3000
ref 4000 4500 pre-fetched region but the chunk is missing test_prechunk_missing
ref 482 2450 region without read tracks chunk-cactus-no-reads

Binary file modified exampleData/chunk-ref-2000-3000/chunk.vg
Binary file not shown.
Binary file not shown.
4 changes: 2 additions & 2 deletions exampleData/chunk-ref-2000-3000/chunk_contents.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
chunk.vg
chunk_0_ref_2000_3000.annotate.txt
chunk_0_ref_2000_3000.gam
chunk_0_ref_1955_5023.annotate.txt
chunk_0_ref_1955_5023.gam
regions.tsv
tracks.json
2 changes: 1 addition & 1 deletion exampleData/chunk-ref-2000-3000/regions.tsv
Original file line number Diff line number Diff line change
@@ -1 +1 @@
ref 2000 3000 chunk_0_ref_2000_3000.gam chunk_0_ref_2000_3000.annotate.txt
ref 1955 5024 exampleData/chunk-ref-2000-3000/chunk_0_ref_1955_5023.gam exampleData/chunk-ref-2000-3000/chunk_0_ref_1955_5023.annotate.txt
10 changes: 6 additions & 4 deletions exampleData/chunk-ref-2000-3000/tracks.json
Original file line number Diff line number Diff line change
@@ -1,18 +1,20 @@
[
{
"trackFile": "exampleData/cactus.vg",
"trackFile": "exampleData/cactus.vg.xg",
"trackType": "graph",
"trackColorSettings": {
"mainPalette": "plainColors",
"auxPalette": "greys"
"mainPalette": "#000000",
"auxPalette": "greys",
"colorReadsByMappingQuality": false
}
},
{
"trackFile": "exampleData/cactus-NA12879.sorted.gam",
"trackType": "read",
"trackColorSettings": {
"mainPalette": "blues",
"auxPalette": "reds"
"auxPalette": "reds",
"colorReadsByMappingQuality": false
}
}
]
162 changes: 162 additions & 0 deletions exampleData/chunk-without-source/chunk.vg
Original file line number Diff line number Diff line change
@@ -0,0 +1,162 @@
VG�%
g
cAGACGGAGTCTTGCTCTTGTTGCTCAGCCTGGAATGCAATGGCACGATCTCAGCTCACTGCAACCTCCACCTCCCGGGTTCAAGCAATTCTCCTGCCTC
g
cAGCCTCCCAAGTAGCAGGGATTACAGGTGCCTGCCACCATGCCAGGCTAATTGTTTTTTCTTTTTTTTCAGATGGAGTCTCACTCTGTCACTCAGGCTG
g
cGATTGTGATGGTGTGATCTCAGCTCACTGCAACCTCAACATCCTGGGTTCAAGCGATTCTCCTGCCTCAGTCTCCCAAGTAGCTGGGACTACAAGTGCG
g
cTGCCACCATGCCTGGCTAATTTTTTTTAGTATTTTTAGTAGAGATGGGGTTTCGCCATATTGGCCAGGCTGGTCTCAAACTCCTGATGTCAGGTGATCC
g
cGCCCTGAGGCTGAGGCAGGAGAATCATTTAAACCCAGGAGGCGGAGGTTGCAGTGAGCCAAGACTGGGCCACTGCACTCCAGCCTGCTAAGTGACAGAG
g
cTGAGACTCCACCTCAAAAAAAAAAAAAAAAGGCAATGCTTCAGGACATAAGGCCTTGCTCTGAAGAGGCCCTAGGAGTGACTCCTGGTGACAGTGAAAG
g
cCCCACAGCCTCTGGCAACTGTATTAACATGAACTTCAATCTGTTAAAGGAAAGCCACCAGGAAAACAGCACTGTAATTTAACGATGTGGAAAAATGTAT
g
cGTAATATCTTAAGGAAAAAAGCAAAACAGTGTAATTATGATCACATTTTATAAAATACACGTGTATATATACGCACATATGCCTGGTGGAGTTTTATGG!
g
cTGATCATCTCCAAGTGGTGGAATTACTGGGATTATTTTATTGTTTTTGTGTAAATTTATACTTTCTTTTTTCTTTTTGAGACACGGTCTCGCTCTGTCG"
g
cCCCAGGCTGGAGTACAGTGGTGTGATCGTGGCTCACTGAAGCATCAACCTCCTGAGCTCAAGTGATCCTCCCACCTCAGCTTCCCAAGTAGCTGCGACT#
g
cACAGGCATCTGCCACCACACCCAGCTACTTTTTAAATTTGTTGTACAGATGAAGTCTCCTTATGTTGCCCAGGCTGGTCTCGAACTTCTAGGCTCCCAC$
g
cCTTGACCTCCATCTTGACCTCCCAAAGTGCTGGAATTATAGGCATGAGCCACCATGCCCGGCCTTGATTTATGTTTTTGTGATGAACATTCATATCTTA%
g
cCTCCCACCCCATGGAAACAGTTCATGTATTACTTTTACAATATAAAACAAATAACAATAAAAACATCAAAAAGACATTTTAGCCATTCATTCAACAAAT&
g
cATTTAAAATGTGCCAAGAACTGTGCTACTCAAGCACCAGGTAATGAGTGATAAACCAAACCCATGCAAAAGGACCCCATATAGCACAGGTACATGCAGG'
g
cCACCTTACCATGGAAGCCATTGTCCTCTGTCCAGGCATCTGGCTGCACAACCACAATTGGGTGGACACCCTGGATCCCCAGGAAGGAAAGAGCATTCAA(
g
cAGTGTCAAAGTAGGACTACTGGAACTGTCACTTCATCATTTTTTTTGTTTGTTTTTGAGACAGGGTCTTGCTCTGTCACCCAGGCTGGAGTGCAGTGGT)
g
cGTGATCTCAGCTCACTGCCACCTCTGCCTCCTGGGCTCAAGCAATCCTTCCATCTCAGCCTCCTAAGTAGCTGGAACTACAGACACGTACCACCACCCC*
g
cTGGCTAATTTTTTTGTATTTTTGGTAGAGACAGGGTTTTGCCATGTTGCCCAGGCTGGTCTCAAACTCCTGGGCTCAACTTCACCCCCGGGATTATAGG+
g
cCATGAGCCACCGCACCCAGCCTTGGCTAATTTTTAATAATTTTTTTGTAGACATGAGGTCCTACTGTATTGCCCAGGCTGGTCTTCAGCTCCCAGGCTC,
g
cAAGCGATTCTCCCACCTTGGCCTCCCAGTGTTGTGATTACAGGGGTGGGGCACTGGCCCAGCCCATCATTTCTCTCTCTCTCTTTTTTTTTGAGACGGA-
g
cGTCTCGCTCTGTCGCCCGGGCTGGAGTGCAGTGGCGCGATCTTGGCTCACTGCAACCTCCGCCTCCGGGGTTCAAGCGATTCTCCTGCCCCAGCCCCTC.
g
cAAGTAGCTGGGACTACAGGCGTGCGCCCCTACGCCCAGCTAATTTTTGTATTTTTAGTAGAGACGGGGTTTCGCCATGTTGGTTGGCCAGGATGGTCTC/
g
cGATCTCTTGACCTCGTGATCTGCCCACCTCAGCCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCACCGCACCTAGCTTTTCTCTCTCTCTCTTTTTTT0
g
cTTTTTTTTAGACAAAGTCTCACTCTGTCACCCAGTCTGGAGTGCAGTGGTGCAATCTTGGCTCACTGCAACCTCTGCCTCCCACGTTCAAGCGATGCTC1
g
cACACCTCAACTTCCCAAATAGCTGGCATTACAGGCATGCTCCACCAGGCCTGGCTACTTTTTGTTTTTTTTTTTTTAGTACAGATGGGGTTTCACCATG2
g
cTTGGCCAGGCTGGTCTCAAACTCCTGACAAGTGATCCACCTGCCTCGGCCTCCCAAAGTGCTGGGATTACAGACATGAGCCACCATGCCCAGCCTCCAG3
g
cCCCATCATTTCTTGATGATTTGTTGAAACACAGTATGCTGGGGCAGTCACAGAGAGGAGGGGGAGGGACATATGGGAAAAAGAGTTAGAGGGAAAAAGT4
g
cCTTCCCTCAGTATATTTAATATGTGCAGTTCTCAAATCCTTACCCATCCCTTACAGATGGAGTCTTTTGGCACAGGTATGTGGGCAGAGAAGACTTCTG5
g
cAGGCTACAGTAGGGGCATCCATAGGGACTGACAGGTGCCAGTCTTGCTCACAGGAGAGAATATTGTGTCCTCCCTCTCTGACAGGGCACCCAATACTTA6
g
cCTGTGCCAAGGGTGAATGATGAAAGCTCCTTCACCACAGAAGCACCACACAGCTGTACCATCCATTCCAGTTGATCTAAAATGGACATTTAGATGTAAA7
g
cATCACTGCAGTAATCTGCATACTTAACCCAGGCCCTCTACCCTACACTCTCCGGATGAAGGCTTATAGCAAGACCTCTCAATGGGAGAGTCTGTCTCTC8  !!""##$$%%&&''(())**++,,--..//00112233445566778�
GI262359905[76167]
8 cc(
7 cc(
6 cc(
5 cc(
4 cc(
3 cc(
2 cc(
1 cc(
0 cc( 
/ cc(

. cc( 
- cc( 
, cc(
+ cc(
* cc(
) cc(
( cc(
' cc(
& cc(
% cc(
$ cc(
# cc(
" cc(
! cc(
 cc(
 cc(
 cc(
 cc(
 cc(
 cc(
 cc(�
GI528476558[1937]
cc(
cc(
cc(
cc(
cc(
cc(
 cc(
!cc(
"cc( 
#cc(

$cc( 
%cc( 
&cc(
'cc(
(cc(
)cc(
*cc(
+cc(
,cc(
-cc(
.cc(
/cc(
0cc(
1cc(
2cc(
3cc(
4cc(
5cc(
6cc(
7cc(
8cc(�
ref
cc(
cc(
cc(
cc(
cc(
cc(
 cc(
!cc(
"cc( 
#cc(

$cc( 
%cc( 
&cc(
'cc(
(cc(
)cc(
*cc(
+cc(
,cc(
-cc(
.cc(
/cc(
0cc(
1cc(
2cc(
3cc(
4cc(
5cc(
6cc(
7cc(
8cc(
Expand Down
Binary file not shown.
5 changes: 5 additions & 0 deletions exampleData/chunk-without-source/chunk_contents.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
chunk.vg
chunk_0_ref_1955_5023.annotate.txt
chunk_0_ref_1955_5023.gam
regions.tsv
tracks.json
1 change: 1 addition & 0 deletions exampleData/chunk-without-source/regions.tsv
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
ref 2000 3000 chunk_0_ref_2000_3000.gam chunk_0_ref_2000_3000.annotate.txt
18 changes: 18 additions & 0 deletions exampleData/chunk-without-source/tracks.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
[
{
"trackFile": "exampleData/nonexistent.vg",
"trackType": "graph",
"trackColorSettings": {
"mainPalette": "plainColors",
"auxPalette": "greys"
}
},
{
"trackFile": "exampleData/stillnotareal.sorted.gam",
"trackType": "read",
"trackColorSettings": {
"mainPalette": "blues",
"auxPalette": "reds"
}
}
]
1 change: 1 addition & 0 deletions exampleData/no_source.bed
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
ref 2000 3000 region with no source graph available chunk-without-source
8 changes: 6 additions & 2 deletions scripts/prepare_chunks.sh
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,10 @@ do
printf "$file\n" >> $OUTDIR/chunk_contents.txt
done

# Print BED line
cat $OUTDIR/regions.tsv | cut -f1-3 | tr -d "\n"
# Print BED line, using the region we were passed as the coordinates
echo "${REGION%:*}" | tr -d "\n"
printf "\t"
echo "${REGION}" | rev | cut -f1 -d'-' | rev | tr -d "\n"
printf "\t"
echo "${REGION}" | rev | cut -f2 -d'-' | cut -f1 -d':' | rev | tr -d "\n"
printf "\t${DESC}\t${OUTDIR}\n"
64 changes: 35 additions & 29 deletions src/server.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -206,19 +206,23 @@ async function lockDirectories(directoryPaths, lockType, func) {
// attempt to acquire a lock for the next directory, and call lockDirectories on the remaining directories
const currDirectory = directoryPaths.pop();
return lockDirectory(currDirectory, lockType, async function() {
lockDirectories(directoryPaths, lockType, func);
return lockDirectories(directoryPaths, lockType, func);
})
}

// runs every hour
// deletes any files in the download directory past the set fileExpirationTime set in config
cron.schedule('0 * * * *', () => {
cron.schedule('0 * * * *', async () => {
console.log("cron scheduled check");
// attempt to acquire a write lock for each on the directory before attemping to delete files
for (const dir of [DOWNLOAD_DATA_PATH, UPLOAD_DATA_PATH]) {
lockDirectory(dir, lockTypes.WRITE_LOCK, async function() {
deleteExpiredFiles(dir);
});
try {
await lockDirectory(dir, lockTypes.WRITE_LOCK, async function() {
deleteExpiredFiles(dir);
});
} catch (e) {
console.error("Error checking for expired files in " + dir + ":", e);
}
}
});

Expand Down Expand Up @@ -349,20 +353,28 @@ function getGams(tracks) {
return getFilesOfType(tracks, fileTypes.READ);
}

// To bridge Express next(err) error handling and async function error
// handling, we have this adapter. It takes Express's next and an async
// function and calls next with any error raised when the async function is
// initially called *or* when its promise is awaited.
async function captureErrors(next, callback) {
  // Await the callback inside try/catch so that both a synchronous throw
  // from invoking it and a rejection of the promise it returns are caught.
  try {
    await callback();
  } catch (err) {
    // Route the failure into Express's error-handling middleware chain.
    next(err);
  }
}

api.post("/getChunkedData", (req, res, next) => {
// We would like this to be an async function, but then Express error
// handling doesn't work, because it doesn't detect returned promise
// rejections until Express 5. We have to pass an error to next() or else
// throw synchronously.
//
// So we set up a promise here and we make sure to handle failures
// ourselves with next().

// put readlock on necessary directories while processing chunked data
lockDirectories([DOWNLOAD_DATA_PATH, UPLOAD_DATA_PATH], lockTypes.READ_LOCK, async function() {
let promise = getChunkedData(req, res, next);
promise.catch(next);
await promise;
captureErrors(next, async () => {
// put readlock on necessary directories while processing chunked data
return lockDirectories([DOWNLOAD_DATA_PATH, UPLOAD_DATA_PATH], lockTypes.READ_LOCK, async function() {
return getChunkedData(req, res, next);
});
});
});

Expand Down Expand Up @@ -1529,28 +1541,23 @@ async function getChunkTracks (bedFile, chunk) {
// Expects a request with a bed file and a chunk name
// Returns tracks retrieved from getChunkTracks
api.post("/getChunkTracks", (req, res, next) => {
console.log("received request for chunk tracks");
if (!req.body.bedFile || !req.body.chunk) {
throw new BadRequestError("Invalid request format", req.body.bedFile, req.body.chunk);
}
let promise = (async () => {
captureErrors(next, async () => {
console.log("received request for chunk tracks");
if (!req.body.bedFile || !req.body.chunk) {
throw new BadRequestError("Invalid request format", req.body.bedFile, req.body.chunk);
}

// tracks are falsy if fetch is unsuccessful

// TODO: This operation needs to hold a reader lock on the upload/download directories.
// waiting for lock changes to be merged
const tracks = await getChunkTracks(req.body.bedFile, req.body.chunk);
res.json({ tracks: tracks });
})();

// schedules next to be called if promise is rejected
promise.catch(next);

});
});

api.post("/getBedRegions", (req, res, next) => {
// Bridge async functions to Express error handling with next(err). Don't
// return a promise.
let promise = (async () => {
captureErrors(next, async () => {
console.log("received request for bedRegions");
const result = {
bedRegions: [],
Expand All @@ -1564,8 +1571,7 @@ api.post("/getBedRegions", (req, res, next) => {
} else {
throw new BadRequestError("No BED file specified");
}
})();
promise.catch(next);
});
});

// Load up the given BED file by URL or path, and
Expand Down

0 comments on commit 1082ccd

Please sign in to comment.