Skip to content

Commit

Permalink
update
Browse files Browse the repository at this point in the history
  • Loading branch information
tractatus committed Dec 7, 2023
1 parent 21f7750 commit 9ef4ffe
Show file tree
Hide file tree
Showing 76 changed files with 1,061 additions and 28 deletions.
Binary file modified .DS_Store
Binary file not shown.
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,13 @@
# RStudio files
.Rproj.user/

/figure01
/training_data/dapi/images/*.tif
/training_data/dapi/masks/*.tif

/training_data/cytosol/images/*.tif
/training_data/cytosol/masks/*.tif
/training_data/cytosol/weight_maps/*.tif

# produced vignettes
vignettes/*.html
Expand Down
5 changes: 3 additions & 2 deletions .quarto/_freeze/supplementary_fig02/execute-results/md.json

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion .quarto/xref/693ec055
Original file line number Diff line number Diff line change
@@ -1 +1 @@
{"headings":["create-a-table-of-raw-data-files","import-the-data","make-a-master-data-frame-with-all-data.","make-a-plot"],"entries":[]}
{"entries":[],"headings":["create-a-table-of-raw-data-files","import-the-data","make-a-master-data-frame-with-all-data","make-a-plot","same-for-ms-data","peak-detection-and-annotation-in-ms","bargraph-of-tfp-ester-labeling-gels","absorption-spectrum"]}
7 changes: 7 additions & 0 deletions .quarto/xref/INDEX
Original file line number Diff line number Diff line change
Expand Up @@ -5,5 +5,12 @@
"supplementary_fig02.qmd": {
"supplementary_fig02.html": "54dbd63a",
"supplementary_fig02.md": "693ec055"
},
"presentations/20230915/index.qmd": {
"index.html": "f1b442c2"
},
"figure01.qmd": {
"figure01.html": "9560ce1b",
"figure01.md": "0453fe3b"
}
}
108 changes: 107 additions & 1 deletion augment.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,54 @@
import os
import cv2
import numpy as np

from skimage.segmentation import find_boundaries

w0 = 10
sigma = 5

def make_weight_map(mask, w0=10.0, sigma=5.0):
    """
    Generate the weight map for a binary mask as specified in the UNet paper.

    The map is the sum of a class-balancing term and a border-emphasis term
    w0 * exp(-d**2 / (2 * sigma**2)), where d is the distance from each pixel
    to the nearest object boundary pixel.
    NOTE(review): the UNet paper uses d1 + d2 (distances to the two nearest
    cells); this implementation uses the single nearest boundary — confirm
    this simplification is intended.

    Parameters
    ----------
    mask : array-like
        A 2D binary mask of shape (image_height, image_width).
    w0 : float, optional
        Amplitude of the border loss term (default 10.0, matching the
        module-level constant).
    sigma : float, optional
        Decay length of the border loss term in pixels (default 5.0).

    Returns
    -------
    numpy.ndarray
        A 2D weight map of shape (image_height, image_width).
    """
    nrows, ncols = mask.shape
    mask = (mask > 0).astype(int)

    # Distance from every pixel to the nearest boundary pixel.
    # indexing='ij' keeps X1/Y1 in (row, col) order so the flattened
    # distance map can be reshaped back to (nrows, ncols); the default
    # 'xy' indexing would transpose the pixel order.
    X1, Y1 = np.meshgrid(np.arange(nrows), np.arange(ncols), indexing='ij')
    bounds = find_boundaries(mask, mode='inner')
    X2, Y2 = np.nonzero(bounds)

    if X2.size > 0:
        # (n_boundary, n_pixels) squared coordinate differences ...
        xSum = (X2.reshape(-1, 1) - X1.reshape(1, -1)) ** 2
        ySum = (Y2.reshape(-1, 1) - Y1.reshape(1, -1)) ** 2
        # ... reduced over the boundary axis to the nearest-boundary
        # distance per pixel, then restored to image shape. Without this
        # reduction the border map cannot be added to the class map below.
        distMap = np.sqrt(xSum + ySum).min(axis=0).reshape(nrows, ncols)
        border_loss_map = w0 * np.exp((-1 * distMap ** 2) / (2 * (sigma ** 2)))
    else:
        # No boundary pixels (e.g. empty or full mask): no border emphasis.
        border_loss_map = np.zeros((nrows, ncols))

    # Class weight map: each class is weighted by the frequency of the
    # other class, so the rarer class receives the larger weight.
    loss = np.zeros((nrows, ncols))
    w_1 = 1 - mask.sum() / loss.size
    w_0 = 1 - w_1
    loss[mask == 1] = w_1
    loss[mask == 0] = w_0

    # Combine border loss and class weight to get the final weight map
    return border_loss_map + loss

# Set the paths for the input folders
mask_org_folder = "./training_data/masks_cytosol"
Expand All @@ -8,10 +57,12 @@
# Set the paths for the output folders
mask_folder = "./training_data/cytosol/masks"
images_folder = "./training_data/cytosol/images"
weights_folder = "./training_data/cytosol/weight_maps"

# Create the output folders if they don't exist
os.makedirs(mask_folder, exist_ok=True)
os.makedirs(images_folder, exist_ok=True)
os.makedirs(weights_folder, exist_ok=True)

# Set the ROI size and step
roi_size = (256, 256)
Expand All @@ -25,10 +76,58 @@
# Read the mask image
mask_path = os.path.join(mask_org_folder, file_name)
mask = cv2.imread(mask_path, cv2.IMREAD_GRAYSCALE)
mask_image = cv2.imread(mask_path, cv2.IMREAD_GRAYSCALE)

# Convert the image to float32 format for Laplacian operation
mask = mask.astype(np.float32)

# Apply Laplacian filter
B = cv2.Laplacian(mask, cv2.CV_32F)

# Take the absolute value in-place
B = np.abs(B)

# Divide by B (this will produce a divide by zero warning that you can safely ignore)
B /= B
B = np.nan_to_num(B, nan=0)

# Multiply by mask
B *= mask

mask = mask - B

# Convert B back to uint8 if needed
mask = mask.astype(np.uint8)

# Define the kernel (structuring element) for erosion
kernel_size = 3 # Adjust the size as needed
kernel = np.ones((kernel_size, kernel_size), dtype=np.uint8)
# Perform erosion
mask = cv2.erode(mask, kernel, iterations=4)
#
# # Define the new width and height (25% of the original size)
# new_width = int(mask.shape[1] * 0.25)
# new_height = int(mask.shape[0] * 0.25)
#
# # Resize the image using bicubic interpolation
# mask = cv2.resize(mask, (new_width, new_height), interpolation=cv2.INTER_CUBIC)
#
_, mask = cv2.threshold(mask, 0, 255, cv2.THRESH_BINARY)
_, thresholded_mask2 = cv2.threshold(mask_image, 0, 255, cv2.THRESH_BINARY)


boundary = thresholded_mask2-mask
boundary = boundary.astype(np.uint8)

dist = cv2.distanceTransform(255-boundary, cv2.DIST_L2, cv2.DIST_MASK_5)

weight_map = w0 * np.exp((-1 * (dist) ** 2) / (2 * (sigma ** 2)))
weight_map = weight_map.astype(np.uint8)

# Read the corresponding input image
image_path = os.path.join(images_org_folder, file_name)
image = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE | cv2.IMREAD_ANYDEPTH)
#image = cv2.resize(image, (new_width, new_height), interpolation=cv2.INTER_CUBIC)

# Get the dimensions of the images
mask_height, mask_width = mask.shape[:2]
Expand All @@ -42,11 +141,18 @@

# Extract the ROI from the image
roi_image = image[y:y+roi_size[0], x:x+roi_size[1]]


# Extract the ROI from the weight
roi_weight = weight_map[y:y+roi_size[0], x:x+roi_size[1]]

# Save the ROI as a new image in the mask folder
new_mask_path = os.path.join(mask_folder, f"{file_name}_{y}_{x}.tif")
cv2.imwrite(new_mask_path, roi_mask)

# Save the ROI as a new image in the images folder
new_image_path = os.path.join(images_folder, f"{file_name}_{y}_{x}.tif")
cv2.imwrite(new_image_path, roi_image)

# Save the ROI as a new image in the images folder
new_weight_path = os.path.join(weights_folder, f"{file_name}_{y}_{x}.tif")
cv2.imwrite(new_weight_path, roi_weight)
52 changes: 52 additions & 0 deletions augment_dapi.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
"""Tile DAPI masks and images into overlapping 256x256 training ROIs."""
import os
import cv2

# Set the paths for the input folders
mask_org_folder = "./training_data/masks_dapi"
images_org_folder = "./training_data/images_dapi"

# Set the paths for the output folders
mask_folder = "./training_data/dapi/masks"
images_folder = "./training_data/dapi/images"

# Create the output folders if they don't exist
os.makedirs(mask_folder, exist_ok=True)
os.makedirs(images_folder, exist_ok=True)

# Set the ROI size and step
roi_size = (256, 256)  # (height, width) of each training tile
roi_step = 128  # tiles overlap by 50% in each direction

# Get the list of file names in the mask_org folder
file_names = [file for file in os.listdir(mask_org_folder) if file.lower().endswith(('.tif', '.tiff'))]

# Iterate over the file names
for file_name in file_names:
    # Read the mask image
    mask_path = os.path.join(mask_org_folder, file_name)
    mask = cv2.imread(mask_path, cv2.IMREAD_GRAYSCALE)

    # Read the corresponding input image (ANYDEPTH keeps 16-bit data)
    image_path = os.path.join(images_org_folder, file_name)
    image = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE | cv2.IMREAD_ANYDEPTH)

    # cv2.imread returns None on failure; skip unreadable or unpaired
    # files instead of crashing on .shape below.
    if mask is None or image is None:
        print(f"Skipping {file_name}: could not read mask or image")
        continue

    # Get the dimensions of the images
    mask_height, mask_width = mask.shape[:2]
    image_height, image_width = image.shape[:2]

    # Iterate over the ROI positions (sliding window, 50% overlap)
    for y in range(0, mask_height - roi_size[0] + 1, roi_step):
        for x in range(0, mask_width - roi_size[1] + 1, roi_step):
            # Extract the ROI from the mask
            roi_mask = mask[y:y+roi_size[0], x:x+roi_size[1]]

            # Extract the ROI from the image
            roi_image = image[y:y+roi_size[0], x:x+roi_size[1]]

            # Save the ROI as a new image in the mask folder
            new_mask_path = os.path.join(mask_folder, f"{file_name}_{y}_{x}.tif")
            cv2.imwrite(new_mask_path, roi_mask)

            # Save the ROI as a new image in the images folder
            new_image_path = os.path.join(images_folder, f"{file_name}_{y}_{x}.tif")
            cv2.imwrite(new_image_path, roi_image)
43 changes: 43 additions & 0 deletions augment_weight.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
"""Tile precomputed cytosol weight maps into overlapping 256x256 ROIs."""
import os
import cv2
import numpy as np

# Set the paths for the input folders
weights_org_folder = "./training_data/weights_cytosol"

# Set the paths for the output folders
weights_folder = "./training_data/cytosol/weight_maps"

# Create the output folders if they don't exist
os.makedirs(weights_folder, exist_ok=True)

# Set the ROI size and step
roi_size = (256, 256)  # (height, width) of each tile
roi_step = 128  # 50% overlap, matching the image/mask tiling scripts

# Get the list of file names in the weights folder
file_names = [file for file in os.listdir(weights_org_folder) if file.lower().endswith(('.tif', '.tiff'))]

# Iterate over the file names
for file_name in file_names:
    # Log progress, one line per source weight map
    print(file_name)

    # Read the weight map (IMREAD_UNCHANGED preserves the stored bit depth)
    weight_path = os.path.join(weights_org_folder, file_name)
    weight = cv2.imread(weight_path, cv2.IMREAD_UNCHANGED)

    # cv2.imread returns None on failure; skip unreadable files instead of
    # crashing on .astype below.
    if weight is None:
        print(f"Skipping {file_name}: could not read weight map")
        continue

    # Normalise the dtype so every saved tile is float32
    weight = weight.astype(np.float32)

    # Get the dimensions of the images
    weight_height, weight_width = weight.shape[:2]

    # Iterate over the ROI positions (sliding window, 50% overlap)
    for y in range(0, weight_height - roi_size[0] + 1, roi_step):
        for x in range(0, weight_width - roi_size[1] + 1, roi_step):

            # Extract the ROI from the weight
            roi_weight = weight[y:y+roi_size[0], x:x+roi_size[1]]

            # Save the ROI as a new image in the weights folder
            new_weight_path = os.path.join(weights_folder, f"{file_name}_{y}_{x}.tif")
            cv2.imwrite(new_weight_path, roi_weight)
Binary file added border_loss_map.tif
Binary file not shown.
9 changes: 9 additions & 0 deletions data/csv/fig02_cytosol_cMycIsotype_Results.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
,Label,Mean,Min,Max,Median
1,A:0003-0435-0456,168.495,0,531,199
2,A:0003-0177-0985,160.733,0,480,198
3,A:0003-0444-1366,160.783,0,383,198
4,A:0003-0852-1427,160.357,0,422,205
5,A:0003-1455-0942,172.318,0,360,201
6,A:0003-1071-0381,177.828,0,371,200
7,A:0003-0739-0646,181.174,0,411,202
8,A:0003-1041-0991,167.445,0,368,207
29 changes: 29 additions & 0 deletions data/csv/fig02_cytosol_intensity_Results.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
,Label,Mean,Min,Max,Median
1,A:0003-0435-0456,168.495,0,531,199
2,A:0003-0177-0985,160.733,0,480,198
3,A:0003-0444-1366,160.783,0,383,198
4,A:0003-0852-1427,160.357,0,422,205
5,A:0003-1455-0942,172.318,0,360,201
6,A:0003-1071-0381,177.828,0,371,200
7,A:0003-0739-0646,181.174,0,411,202
8,A:a,167.445,0,368,207
9,cMycMax:0003-0525-0460,167.621,0,706,218
10,cMycMax:0003-0237-1005,187.198,0,664,226
11,cMycMax:0003-0427-1131,176.311,0,863,227
12,cMycMax:0003-0757-1365,192.152,0,716,228
13,cMycMax:0003-0747-1041,158.532,0,665,247
14,cMycMax:0003-0805-0802,171.970,0,940,242
15,cMycMax:0003-1008-0588,171.795,0,467,245
16,cMycMax:0003-1282-1510,151.439,0,604,227
17,cMycMax:0003-1300-0888,197.114,0,879,231
18,cMycMax:b,190.831,0,522,222
19,isotype:0003-0841-0262,170.476,0,403,207
20,isotype:0003-0511-0694,176.406,0,684,211
21,isotype:0003-0225-0988,183.841,0,453,213
22,isotype:0003-0103-1492,162.702,0,365,206
23,isotype:0003-0532-1595,210.358,0,373,214
24,isotype:0003-0675-1117,171.045,0,536,217
25,isotype:0003-0939-0679,178.793,0,666,218
26,isotype:0003-1008-1426,167.730,0,432,211
27,isotype:0003-1293-0927,176.555,0,411,211
28,isotype:0003-1243-0300,171.558,0,665,207
30 changes: 30 additions & 0 deletions data/csv/fig02_nuclei_intensity_Results.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
,Label,Mean,Min,Max,Median
1,C3-MAX_PC3_230925_CMYC_igg_ON_wash_1_MMStack_Pos0.ome-1-1-1.tif:0001-0144,246.735,192,475,244
2,C3-MAX_PC3_230925_CMYC_igg_ON_wash_1_MMStack_Pos0.ome-1-1-1.tif:0002-0429,245.010,189,645,242
3,C3-MAX_PC3_230925_CMYC_igg_ON_wash_1_MMStack_Pos0.ome-1-1-1.tif:0004-0491,243.300,187,608,239
4,C3-MAX_PC3_230925_CMYC_igg_ON_wash_1_MMStack_Pos0.ome-1-1-1.tif:0006-0766,275.857,207,865,271
5,C3-MAX_PC3_230925_CMYC_igg_ON_wash_1_MMStack_Pos0.ome-1-1-1.tif:0007-0875,244.401,195,502,241
6,C3-MAX_PC3_230925_CMYC_igg_ON_wash_1_MMStack_Pos0.ome-1-1-1.tif:0008-1056,268.981,202,614,264
7,C3-MAX_PC3_230925_CMYC_igg_ON_wash_1_MMStack_Pos0.ome-1-1-1.tif:0009-1087,262.395,195,512,260
8,C3-MAX_PC3_230925_CMYC_igg_ON_wash_1_MMStack_Pos0.ome-1-1-1.tif:a,257.058,198,509,254
9,C3-MAX_PC3_230925_MycMAX_ON_wash_10_MMStack_Pos0.ome-1-1.tif:0001-0223,348.271,227,1502,336
10,C3-MAX_PC3_230925_MycMAX_ON_wash_10_MMStack_Pos0.ome-1-1.tif:0002-0470,317.606,217,904,308
11,C3-MAX_PC3_230925_MycMAX_ON_wash_10_MMStack_Pos0.ome-1-1.tif:0003-0478,317.967,220,1247,308
12,C3-MAX_PC3_230925_MycMAX_ON_wash_10_MMStack_Pos0.ome-1-1.tif:0004-0760,432.118,250,1648,418
13,C3-MAX_PC3_230925_MycMAX_ON_wash_10_MMStack_Pos0.ome-1-1.tif:0005-0740,358.088,255,804,357
14,C3-MAX_PC3_230925_MycMAX_ON_wash_10_MMStack_Pos0.ome-1-1.tif:0006-0846,329.041,221,983,319
15,C3-MAX_PC3_230925_MycMAX_ON_wash_10_MMStack_Pos0.ome-1-1.tif:0007-0851,315.532,232,524,314
16,C3-MAX_PC3_230925_MycMAX_ON_wash_10_MMStack_Pos0.ome-1-1.tif:0008-1022,332.078,249,581,329
17,C3-MAX_PC3_230925_MycMAX_ON_wash_10_MMStack_Pos0.ome-1-1.tif:0009-1211,351.233,224,1086,346
18,C3-MAX_PC3_230925_MycMAX_ON_wash_10_MMStack_Pos0.ome-1-1.tif:0010-1307,331.643,233,1038,324
19,C3-MAX_PC3_230925_MycMAX_ON_wash_10_MMStack_Pos0.ome-1-1.tif:b,313.757,228,906,307
20,C3-MAX_PC3_230925_IGG_On_wash_1_MMStack_Pos0.ome-1-1-1.tif:0001-0048,258.937,204,478,257
21,C3-MAX_PC3_230925_IGG_On_wash_1_MMStack_Pos0.ome-1-1-1.tif:0002-0308,275.938,208,730,270
22,C3-MAX_PC3_230925_IGG_On_wash_1_MMStack_Pos0.ome-1-1-1.tif:0003-0552,267.029,207,996,262
23,C3-MAX_PC3_230925_IGG_On_wash_1_MMStack_Pos0.ome-1-1-1.tif:0004-0629,286.298,211,1287,276
24,C3-MAX_PC3_230925_IGG_On_wash_1_MMStack_Pos0.ome-1-1-1.tif:0005-0738,253.333,197,593,251
25,C3-MAX_PC3_230925_IGG_On_wash_1_MMStack_Pos0.ome-1-1-1.tif:0006-0714,252.963,213,352,252
26,C3-MAX_PC3_230925_IGG_On_wash_1_MMStack_Pos0.ome-1-1-1.tif:0007-0958,285.719,212,644,279
27,C3-MAX_PC3_230925_IGG_On_wash_1_MMStack_Pos0.ome-1-1-1.tif:0008-1026,266.693,198,654,263
28,C3-MAX_PC3_230925_IGG_On_wash_1_MMStack_Pos0.ome-1-1-1.tif:0009-1296,256.807,195,715,254
29,C3-MAX_PC3_230925_IGG_On_wash_1_MMStack_Pos0.ome-1-1-1.tif:0010-1327,252.591,202,507,250
1 change: 1 addition & 0 deletions figure01.qmd
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@ for (sample_name in unique_samples) {
axis(1, at=seq(300,800,by=50))
peak.max
```

Save the plot.
Expand Down
Binary file not shown.
Binary file not shown.
Binary file not shown.
23 changes: 23 additions & 0 deletions get_data.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
#!/bin/bash
# Run figure01.py on every TIF image under $image_folder.

# Specify the folder containing TIF images
image_folder="./figure01/230913"

# Specify the Python script to run
python_script="figure01.py"

# Check if the folder exists
if [ ! -d "$image_folder" ]; then
    echo "Folder not found: $image_folder"
    exit 1
fi

# With nullglob the pattern expands to nothing (instead of the literal
# string "*.tif") when no files match, so we can count matches up front
# rather than testing -f inside the loop.
shopt -s nullglob
tif_files=("$image_folder"/*.tif)

if [ "${#tif_files[@]}" -eq 0 ]; then
    echo "No TIF images found in $image_folder"
    exit 0
fi

# Iterate through TIF images in the folder
for image_file in "${tif_files[@]}"; do
    echo "Processing image: $image_file"
    python "$python_script" "$image_file"
done
Loading

0 comments on commit 9ef4ffe

Please sign in to comment.