
Commit 042df07

WIP #80 import ok, rest fails
1 parent e3002c2 commit 042df07

File tree: 4 files changed, 64 additions and 124 deletions


client/src/views/CompareView.vue

Lines changed: 5 additions & 5 deletions

@@ -59,12 +59,12 @@ const onExport = () => {
 function load(operation) {
   records.value = undefined
   loading.value = true
-  HTTP.get('/compare/ops', {
+  HTTP.get('/compare', {
     params: {
-      datasetIdsA: selectedDatasetA.value,
-      datasetIdsB: selectedDatasetB.value,
-      datasetUpload: datasetUploaded.value,
-      queryOperation: operation
+      reference: selectedDatasetA.value,
+      comparison: selectedDatasetB.value,
+      upload: datasetUploaded.value,
+      operation: operation
     },
     paramsSerializer: {
       indexes: null

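A note on the client change: with paramsSerializer set to indexes: null, axios serializes array values as repeated keys, so the new call should produce a query string of the form /compare?reference=...&reference=...&comparison=...&operation=.... A minimal sketch of an equivalent request from Python follows; the host, route prefix, dataset IDs, and operation value are placeholders, not values taken from this commit:

# Sketch only: reproduce the CompareView request outside the SPA.
# Host, prefix, dataset IDs, and the operation string are assumptions.
import requests

params = [
    ("reference", "dataset_a1"),      # repeated key -> getlist("reference") on the server
    ("reference", "dataset_a2"),
    ("comparison", "dataset_b1"),
    ("operation", "intersectSTrue"),  # assumed "<op>S<strand>" encoding, see public.py below
]
response = requests.get("http://localhost:5000/api/compare", params=params)
print(response.status_code)
print(response.json() if response.ok else response.text)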
server/src/scimodom/api/public.py

Lines changed: 7 additions & 7 deletions

@@ -117,18 +117,18 @@ def get_browse():
     return public_service.get_dataset()


-@api.route("/compare/<step>", methods=["GET"])
+@api.route("/compare", methods=["GET"])
 @cross_origin(supports_credentials=True)
-def get_compare(step):
+def get_compare():
     """Compare view API."""
-    dataset_ids_a = request.args.getlist("datasetIdsA", type=str)
-    dataset_ids_b = request.args.getlist("datasetIdsB", type=str)
-    dataset_upload = request.args.get("datasetUpload", type=str)
-    query_operation = request.args.get("queryOperation", type=str)
+    reference_ids = request.args.getlist("reference", type=str)
+    comparison_ids = request.args.getlist("comparison", type=str)
+    upload_path = request.args.get("upload", type=str)
+    query_operation = request.args.get("operation", type=str)

     public_service = get_public_service()
     response = public_service.get_comparison(
-        step, dataset_ids_a, dataset_ids_b, dataset_upload, query_operation
+        reference_ids, comparison_ids, upload_path, query_operation
     )
     return response

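The renamed parameters map one-to-one onto the client payload above: getlist collects repeated keys into a list, get returns a single optional value. A small sanity check of that parsing, using a throwaway Flask app rather than the project's application factory (all query string values are made up):

# Sketch only: how the new query parameters are parsed by Flask.
# The app below is a stand-in, not the scimodom application factory.
from flask import Flask, request

app = Flask(__name__)
url = "/compare?reference=d1&reference=d2&comparison=d3&operation=intersectSTrue"

with app.test_request_context(url):
    assert request.args.getlist("reference", type=str) == ["d1", "d2"]
    assert request.args.getlist("comparison", type=str) == ["d3"]
    assert request.args.get("upload", type=str) is None  # no upload in this request
    assert request.args.get("operation", type=str) == "intersectSTrue"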
server/src/scimodom/services/dataset.py

Lines changed: 1 addition & 1 deletion

@@ -20,7 +20,7 @@
 import scimodom.database.queries as queries
 from scimodom.services.annotation import AnnotationService
 from scimodom.services.assembly import AssemblyService, AssemblyVersionError
-from scimodom.services.importer import get_importer, get_bed_importer
+from scimodom.services.importer import get_importer
 import scimodom.utils.specifications as specs
 import scimodom.utils.utils as utils

server/src/scimodom/services/public.py

Lines changed: 51 additions & 111 deletions

@@ -29,7 +29,7 @@
     Selection,
 )
 import scimodom.database.queries as queries
-from scimodom.services.importer import BEDImporter
+from scimodom.services.importer import get_bed_importer
 from scimodom.services.annotation import AnnotationService
 from scimodom.services.assembly import AssemblyService
 import scimodom.utils.specifications as specs

@@ -465,126 +465,66 @@ def get_dataset(self):

         return self._dump(query)

-    def get_comparison(
-        self, step, dataset_ids_a, dataset_ids_b, dataset_upload, query_operation
-    ):
+    def get_comparison(self, reference_ids, comparison_ids, upload, query_operation):
         """Retrieve ..."""
         # TODO: refactor
         # API call in compare, then query_operation pass as params to SPA components
         # but sending all datasets may be too large?
         # final call after dataset selection + query
         # + lazy loading of results?

-        # TODO: this will not work... dataset vs. modification?
-        if step == "dataset":
-            query = (
-                select(
-                    Dataset.id.label("dataset_id"),
-                    Dataset.title.label("dataset_title"),
-                    Modification.id.label("modification_id"),
-                    DetectionTechnology.id.label("technology_id"),
-                    Organism.id.label("organism_id"),
-                )
-                .join_from(Dataset, Association, Dataset.id == Association.dataset_id)
-                .join_from(
-                    Association, Selection, Association.selection_id == Selection.id
-                )
-                .join_from(
-                    Selection,
-                    Modification,
-                    Selection.modification_id == Modification.id,
-                )
-                .join_from(
-                    Selection,
-                    DetectionTechnology,
-                    Selection.technology_id == DetectionTechnology.id,
-                )
-                .join_from(Selection, Organism, Selection.organism_id == Organism.id)
-            )
-
-            records = self._dump(query)
-
-            # query = (
-            #     select(Taxa.short_name.distinct(), Taxonomy.kingdom)
-            #     .join_from(Taxa, Taxonomy, Taxa.taxonomy_id == Taxonomy.id)
-            #     .join_from(Taxa, Organism, Taxa.id == Organism.taxa_id)
-            # )
-
-            ## so far no order
-            ## [('H. sapiens', 'Animalia'), ('M. musculus', 'Animalia')]
-            ## we need to reformat to fit the "grouped dropdown component"
-            ## we also probably need to add ids to retrieve the final selection
-            ## i.e. taxa, modification, and technology ids
-            ## same below
-
-            # query = select(
-            #     Modification.rna.distinct(),
-            #     Modomics.short_name,
-            # ).join_from(Modification, Modomics, Modification.modomics_id == Modomics.id)
-
-            ## [('mRNA', 'm6A'), ('mRNA', 'm5C'), ('rRNA', 'm6A'), ('mRNA', 'Y'), ('tRNA', 'Y')]
-
-            # query = select(DetectionMethod.meth.distinct(), DetectionTechnology.tech).join_from(
-            #     DetectionMethod,
-            #     DetectionTechnology,
-            #     DetectionMethod.id == DetectionTechnology.method_id,
-            # )
-
-            ## [('Chemical-assisted sequencing', 'm6A-SAC-seq'), ('Native RNA sequencing', 'Nanopore'), ('Chemical-assisted sequencing', 'GLORI'), ('Enzyme/protein-assisted sequencing', 'm5C-miCLIP'), ('Enzyme/protein-assisted sequencing', 'm6ACE-seq'), ('Chemical-assisted sequencing', 'BID-seq'), ('Antibody-based sequencing', 'm6A-seq/MeRIP'), ('Enzyme/protein-assisted sequencing', 'eTAM-seq')]
-
-        elif step == "ops":
-            query = (
-                select(
-                    Data.chrom,
-                    Data.start,
-                    Data.end,
-                    Data.name,
-                    Data.score,
-                    Data.strand,
-                    Association.dataset_id,
-                    # Data.dataset_id,
-                    Data.coverage,
-                    Data.frequency,
-                )
-                .join_from(Data, Association, Data.inst_association)
-                .where(Association.dataset_id.in_(dataset_ids_a))
-                # .order_by(Data.chrom.asc(), Data.start.asc())
+        query = (
+            select(
+                Data.chrom,
+                Data.start,
+                Data.end,
+                Data.name,
+                Data.score,
+                Data.strand,
+                Association.dataset_id,
+                # Data.dataset_id,
+                Data.coverage,
+                Data.frequency,
             )
-            a_records = self._session.execute(query).all()
-
-            # AD HOC - EUF VERSION SHOULD COME FROM SOMEWHERE ELSE!
-            if dataset_upload:
-                filen = Path(dataset_upload).stem
-                b_records = [
-                    BEDImporter(
-                        filen, open(dataset_upload, "r"), filen, "1.7"
-                    ).get_records()
-                ]
-            else:
-                b_records = []
-                for idx in dataset_ids_b:
-                    query = (
-                        select(
-                            Data.chrom,
-                            Data.start,
-                            Data.end,
-                            Data.name,
-                            Data.score,
-                            Data.strand,
-                            Association.dataset_id,
-                            # Data.dataset_id,
-                            Data.coverage,
-                            Data.frequency,
-                        )
-                        .join_from(Data, Association, Data.inst_association)
-                        .where(Association.dataset_id == idx)
-                        # .where(Data.dataset_id == idx)
+            .join_from(Data, Association, Data.inst_association)
+            .where(Association.dataset_id.in_(reference_ids))
+            # .order_by(Data.chrom.asc(), Data.start.asc())
+        )
+        a_records = self._session.execute(query).all()
+
+        # AD HOC - EUF VERSION SHOULD COME FROM SOMEWHERE ELSE!
+        if upload:
+            importer = get_bed_importer(upload)
+            importer.parse_records()
+            importer.close()
+            b_records = importer.get_buffer()
+            # records = [tuple([val for key, val in record.items()]) for record in b_records]
+            # print(b_records)
+        else:
+            b_records = []
+            for idx in comparison_ids:
+                query = (
+                    select(
+                        Data.chrom,
+                        Data.start,
+                        Data.end,
+                        Data.name,
+                        Data.score,
+                        Data.strand,
+                        Association.dataset_id,
+                        # Data.dataset_id,
+                        Data.coverage,
+                        Data.frequency,
                     )
-                    b_records.append(get_session().execute(query).all())
+                    .join_from(Data, Association, Data.inst_association)
+                    .where(Association.dataset_id == idx)
+                    # .where(Data.dataset_id == idx)
+                )
+                b_records.append(get_session().execute(query).all())

-            op, strand = query_operation.split("S")
-            c_records = get_op(op)(a_records, b_records, s=eval(strand))
-            records = [records_factory(op.capitalize(), r)._asdict() for r in c_records]
+        op, strand = query_operation.split("S")
+        c_records = get_op(op)(a_records, b_records, s=eval(strand))
+        records = [records_factory(op.capitalize(), r)._asdict() for r in c_records]

         return records

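One detail worth flagging in get_comparison: the operation string is split on the literal character "S" and the strand flag is then passed through eval. Below is a hedged sketch of the same parsing without eval; the operation names in the asserts are illustrative guesses, as only the "<op>S<strand>" shape is visible in this commit:

# Sketch only: parse "<op>S<bool>" without eval(). The operation names used in
# the asserts are assumptions; this commit only shows the split("S") + eval pattern.
def parse_operation(query_operation: str) -> tuple[str, bool]:
    op, _, strand = query_operation.partition("S")  # split on the first "S" only
    if strand not in ("True", "False"):
        raise ValueError(f"unexpected strand flag: {strand!r}")
    return op, strand == "True"

assert parse_operation("intersectSTrue") == ("intersect", True)
assert parse_operation("closestSFalse") == ("closest", False)

partition splits on the first "S" only, which keeps working if an operation name ever contained an uppercase "S", and dropping eval avoids evaluating arbitrary request input.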