Assaydev persite #185

Open · wants to merge 4 commits into master
2 changes: 1 addition & 1 deletion documentation/DCP-documentation/step_2_submit_jobs.md
@@ -60,7 +60,7 @@ As of Distributed-CellProfiler 2.2.0, `run_batch_general.py` has been reformatted
### Required inputs

* `step` is the step that you would like to make jobs for.
Supported steps are `zproj`, `illum`, `qc`, `qc_persite`, `assaydev`, and`analysis`
Supported steps are `zproj`, `illum`, `qc`, `qc_persite`, `assaydev`, `assaydev_persite`, and `analysis`
* `identifier` is the project identifier (e.g. "cpg0000-jump-pilot" or "2024_11_07_Collaborator_Cell_Painting")
* `batch` is the name of the data batch (e.g. "2020_11_04_CPJUMP1")
* `platelist` is the list of plates to process.
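For orientation, here is a minimal sketch of calling the new step from Python. It assumes `run_batch_general` can be imported from `run_batch_general.py` and that `identifier`, `batch`, and `platelist` are keyword parameters, as the truncated signature later in this diff suggests; the values are the illustrative ones from the documentation above.

```python
# Minimal sketch, not part of this PR. Assumes run_batch_general.py guards its
# CLI parsing so the function can be imported, and reuses the example values
# from the documentation.
from run_batch_general import run_batch_general

run_batch_general(
    "assaydev_persite",              # step: one AssayDev job per plate/well/site
    identifier="cpg0000-jump-pilot",
    batch="2020_11_04_CPJUMP1",
    platelist=["Plate1", "Plate2"],
)
```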
69 changes: 67 additions & 2 deletions run_batch_general.py
@@ -26,7 +26,7 @@ def scheduleBatch(self, data):


def run_batch_general(
    step,  # (zproj, illum, qc, qc_persite, assaydev, or analysis)
    step,  # (zproj, illum, qc, qc_persite, assaydev, assaydev_persite, or analysis)
    identifier="",  # (e.g. cpg0000-jump-pilot)
    batch="",  # (e.g. 2020_11_04_CPJUMP1)
    platelist=[],  # (e.g. ['Plate1','Plate2'])
@@ -427,6 +427,71 @@ def run_batch_general(

        print("AssayDev job submitted. Check your queue")

    elif step == "assaydev_persite":
        assaydevqueue = JobQueue(f"{identifier}_AssayDev")
        if not outpath:
            outpath = path_dict[path_style]["assaydevoutpath"]
        if not usebatch:
            if not pipeline:
                pipeline = "assaydev.cppipe"
            if not csvname:
                csvname = "load_data_with_illum.csv"

            for plate in platelist:
                if all(len(ele) == 0 for ele in wells):
                    for eachrow in rows:
                        for eachcol in columns:
                            for site in sites:
                                templateMessage_ad = {
                                    "Metadata": f"Metadata_Plate={plate},Metadata_Well={eachrow}{int(eachcol):{well_format}},Metadata_Site={site}",
                                    "pipeline": posixpath.join(pipelinepath, pipeline),
                                    "output": outpath,
                                    "input": inputpath,
                                    "data_file": posixpath.join(datafilepath, plate, csvname),
                                }
                                assaydevqueue.scheduleBatch(templateMessage_ad)
                else:
                    for eachwell in wells:
                        for site in sites:
                            templateMessage_ad = {
                                "Metadata": f"Metadata_Plate={plate},Metadata_Well={eachwell},Metadata_Site={site}",
                                "pipeline": posixpath.join(pipelinepath, pipeline),
                                "output": outpath,
                                "input": inputpath,
                                "data_file": posixpath.join(datafilepath, plate, csvname),
                            }
                            assaydevqueue.scheduleBatch(templateMessage_ad)
        else:
            if not batchfile:
                batchfile = "Batch_data_assaydev.h5"
            for plate in platelist:
                if all(len(ele) == 0 for ele in wells):
                    for eachrow in rows:
                        for eachcol in columns:
                            for site in sites:
                                templateMessage_ad = {
                                    "Metadata": f"Metadata_Plate={plate},Metadata_Well={eachrow}{int(eachcol):{well_format}},Metadata_Site={site}",
                                    "pipeline": posixpath.join(batchpath, batchfile),
                                    "output": outpath,
                                    "input": inputpath,
                                    "data_file": posixpath.join(batchpath, batchfile),
                                }
                                assaydevqueue.scheduleBatch(templateMessage_ad)
                else:
                    for eachwell in wells:
                        for site in sites:
                            templateMessage_ad = {
                                "Metadata": f"Metadata_Plate={plate},Metadata_Well={eachwell},Metadata_Site={site}",
                                "pipeline": posixpath.join(batchpath, batchfile),
                                "output": outpath,
                                "input": inputpath,
                                "data_file": posixpath.join(batchpath, batchfile),
                            }
                            assaydevqueue.scheduleBatch(templateMessage_ad)

        print("AssayDev job submitted. Check your queue")

    elif step == "analysis":
        analysisqueue = JobQueue(f"{identifier}_Analysis")
        if not outputstructure:
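As a quick illustration of what the per-site branch above schedules in the non-batchfile case, the sketch below reproduces one message for a single plate, row, column, and site. Only the paths and the `well_format` value (`"02d"`, zero-padded) are assumptions; the dictionary keys and the f-string mirror the diff.

```python
import posixpath

# Illustrative inputs; the paths and well_format="02d" are assumptions, the
# rest mirrors the names used in the new assaydev_persite branch.
plate, eachrow, eachcol, site = "Plate1", "A", 1, 1
well_format = "02d"
pipelinepath, pipeline = "projects/pipelines", "assaydev.cppipe"
datafilepath, csvname = "projects/load_data_csv", "load_data_with_illum.csv"
outpath, inputpath = "projects/workspace/assaydev", "projects/workspace/input"

templateMessage_ad = {
    # f"{eachrow}{int(eachcol):{well_format}}" formats column 1 as "01",
    # so the well label becomes "A01"
    "Metadata": f"Metadata_Plate={plate},Metadata_Well={eachrow}{int(eachcol):{well_format}},Metadata_Site={site}",
    "pipeline": posixpath.join(pipelinepath, pipeline),
    "output": outpath,
    "input": inputpath,
    "data_file": posixpath.join(datafilepath, plate, csvname),
}
# -> "Metadata": "Metadata_Plate=Plate1,Metadata_Well=A01,Metadata_Site=1"
```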
Expand Down Expand Up @@ -512,7 +577,7 @@ def run_batch_general(
)
parser.add_argument(
    "step",
    help="Step to make jobs for. Supported steps are zproj, illum, qc, qc_persite, assaydev, analysis",
    help="Step to make jobs for. Supported steps are zproj, illum, qc, qc_persite, assaydev, assaydev_persite, analysis",
)
parser.add_argument("identifier", help="Project identifier")
parser.add_argument("batch", help="Name of batch")
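Finally, a hedged sketch of the command-line entry point that the updated help string belongs to. It assumes the positional arguments are exactly the `step`, `identifier`, and `batch` shown in the `parser.add_argument` calls above; any plate or well options defined outside this hunk are omitted, so a real run will likely need more arguments.

```python
# Sketch only: arguments beyond step/identifier/batch are not visible in this
# diff, so this invocation is intentionally incomplete.
import subprocess

subprocess.run(
    [
        "python",
        "run_batch_general.py",
        "assaydev_persite",      # step (newly supported by this PR)
        "cpg0000-jump-pilot",    # identifier
        "2020_11_04_CPJUMP1",    # batch
    ],
    check=True,
)
```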