Skip to content

Commit

Permalink
Update Uploader.svelte
Browse files Browse the repository at this point in the history
  • Loading branch information
patrick-koenig committed Feb 23, 2024
1 parent b441561 commit f5bb4f9
Showing 1 changed file with 121 additions and 121 deletions.
242 changes: 121 additions & 121 deletions src/components/isa/generic/Uploader.svelte
Original file line number Diff line number Diff line change
Expand Up @@ -4,134 +4,134 @@ import get from 'lodash.get';
import set from 'lodash.set';
import TableLoader from "./TableLoader.svelte";
import { DataFrame } from "dataframe-js";
import Schema from "@/lib/schemas.js";
import { onMount } from "svelte";
import { isaObj } from '@/stores/isa';
// Component state: `study` is exposed to parent components as the `value`
// prop; `jsonPath` locates this study inside the shared ISA-JSON store.
let study;
export { study as value };
export let jsonPath;
//FIXME: In Expert mode this is not working as it only checks on component creation, but not if the parameter value is changed later
//FIXME: Value is not updated directly in the isa-json when the component loads
// On mount: copy the first comment value of each matching 'Growth' protocol
// parameter onto the corresponding parameter value of the 'Growth' process
// (matched via parameterName.annotationValue).
// NOTE(review): this span is truncated here (closing braces missing) — a
// complete copy of the same handler appears later in the file.
onMount(()=>{
let growth_protocol = study.protocols.find(protocol => protocol.name === 'Growth');
if (study.processSequence.length > 0) {
let growth_process = study.processSequence.find(process => {
return process.executesProtocol.name === 'Growth'
})
if (growth_process) {
for (let parameterValue of growth_process.parameterValues) {
// Match the process parameter value to its protocol parameter.
let protocol_parameter = growth_protocol.parameters.find(parameter => parameter.parameterName.annotationValue === parameterValue.category.parameterName.annotationValue)
if (protocol_parameter) {
parameterValue.value = protocol_parameter.comments[0].value;
}
import TableLoader from "./TableLoader.svelte";
import { DataFrame } from "dataframe-js";
import Schema from "@/lib/schemas.js";
import { onMount } from "svelte";
import { isaObj } from '@/stores/isa';
// Component state: `study` is exposed to parent components as the `value`
// prop; `jsonPath` locates this study inside the shared ISA-JSON store.
let study;
export { study as value };
export let jsonPath;
//FIXME: In Expert mode this is not working as it only checks on component creation, but not if the parameter value is changed later
//FIXME: Value is not updated directly in the isa-json when the component loads
// On mount: copy the first comment value of each matching 'Growth' protocol
// parameter onto the corresponding parameter value of the 'Growth' process
// (parameters are matched via parameterName.annotationValue).
// Fixes vs. original: balanced braces (the rendered original closed one block
// short); no longer throws when the study has no 'Growth' protocol
// (`growth_protocol.parameters` on undefined) or when a matched protocol
// parameter carries no comments (`comments[0].value` on an empty array).
onMount(() => {
    // Protocol that carries the authoritative values (in comments[0]).
    let growth_protocol = study.protocols.find(protocol => protocol.name === 'Growth');
    if (growth_protocol && study.processSequence.length > 0) {
        let growth_process = study.processSequence.find(process => {
            return process.executesProtocol.name === 'Growth';
        });
        if (growth_process) {
            for (let parameterValue of growth_process.parameterValues) {
                // Match by ontology annotation value of the parameter name.
                let protocol_parameter = growth_protocol.parameters.find(parameter =>
                    parameter.parameterName.annotationValue === parameterValue.category.parameterName.annotationValue);
                // Guard: a parameter without comments would otherwise throw.
                if (protocol_parameter && protocol_parameter.comments.length > 0) {
                    parameterValue.value = protocol_parameter.comments[0].value;
                }
            }
        }
    }
});
//TODO: Add to config
// Downloadable CSV header template; column names define the expected upload format.
let template = new Blob(["Art_calc,Jahr_calc,Sortiment_calc,BKR_Nr,BKR_Bezeichnung,Anbaugebiet_Nr,Anbaugebiete,Standort_calc,Bundeslandkuerzel,DATUM,BBCH_VON,BBCH,BBCH_BIS,Termin,Merkmal,MerkmalsBez,StufenNrFak1,StufenNrFak2,Wdh,Sorte,Status,KennNr,Wert"], { type: 'text/csv;charset=utf-8,' });
// Table interpretation mode; only 'long' is assigned in this file.
let mode: 'long'|'wide' = 'long';
// Comma-separated column lists used to derive ISA objects from the table:
// columns identifying a sample, a source, the measurement columns, and the
// source characteristics, respectively.
let sample_key = 'Sorte,StufenNrFak1,StufenNrFak2,Wdh'
let source_key = 'Sorte';
let dataframe_keys = 'DATUM,BBCH_VON,BBCH,BBCH_BIS,Merkmal,Wert'
// NOTE(review): 'characterisitcs' is a typo for 'characteristics'; renaming
// would require touching every use elsewhere in this file.
let characterisitcs = 'Anbaugebiete,Standort_calc,Jahr_calc,Art_calc';
// Number of rows shown in the upload preview.
let previewSize = 5;
/**
 * Rebuild the study's sources, samples and the 'Growth' process from an
 * uploaded data frame. Existing materials and the process sequence are
 * discarded and regenerated.
 * @param df dataframe-js DataFrame built from the uploaded table.
 */
function writeStudy(df){
// One group per unique source-key combination -> one ISA source each.
let groupedDF = df
.groupBy(...source_key.split(','))
.toCollection();
// Reset materials and processes before regenerating them.
study.materials.sources = [];
study.materials.samples = [];
study.processSequence = [];
groupedDF.forEach((source_group) => {
// Source name is the (single) group key; characteristics come from the
// group's first row.
let source = Schema.getSource(
Object.values(source_group.groupKey)[0],
source_group.group.select(...characterisitcs.split(',')).toCollection()[0]
);
study.materials.sources = [...study.materials.sources, source];
// One sample per unique sample-key combination within the source group;
// the sample name joins the key values with '-'.
source_group.group.groupBy(...sample_key.split(',')).toCollection().forEach((sample_group) => {
let sample = Schema.getObjectFromSchema('sample');
sample.name = Object.values(sample_group.groupKey).join('-');
sample.derivesFrom = [source];
study.materials.samples = [...study.materials.samples, sample];
});
});
// Build the single 'Growth' process: sources in, samples out, parameter
// values taken from the protocol parameters' first comments.
// NOTE(review): throws if no 'Growth' protocol exists or a parameter has no
// comments — verify upstream guarantees.
let growth_protocol = study.protocols.find(protocol => protocol.name === 'Growth');
let process = Schema.getObjectFromSchema('process');
process.inputs = study.materials.sources;
process.executesProtocol = growth_protocol;
process.parameterValues = growth_protocol.parameters.map(parameter => {
let value = parameter.comments[0].value;
let process_parameter_value = Schema.getObjectFromSchema('process_parameter_value');
process_parameter_value.category = parameter;
process_parameter_value.value = value
return process_parameter_value;
});
process.outputs = study.materials.samples;
study.processSequence = [...study.processSequence, process];
}
/**
 * Rebuild the study's single phenotyping assay from the uploaded data frame.
 * NOTE(review): this span is truncated mid-function by the page's diff
 * rendering; a complete tail of the same function appears later in the file.
 * @param df dataframe-js DataFrame built from the uploaded table.
 */
function writeAssay(df) {
// Fresh 'Phenotyping' protocol; keep only the first existing protocol.
let phenotyping_protocol = Schema.getObjectFromSchema('protocol');
phenotyping_protocol.name = 'Phenotyping';
study.protocols = [study.protocols[0], phenotyping_protocol];
study.assays = [];
let assay = Schema.getObjectFromSchema('assay');
assay.materials.samples = study.materials.samples;
assay.dataFiles = [];
// Serialise the measurement columns (keyed by a derived 'Sample Name') into
// a 'Content' comment that will be attached to the data file.
let datafileContent = Schema.getObjectFromSchema('comment');
datafileContent.name = 'Content';
datafileContent.value = JSON.stringify(
df.select(...sample_key.split(','), ...dataframe_keys.split(','))
.withColumn(
'Sample Name',
row => sample_key.split(',').map(key => row.get(key)).join('-')
)
.select('Sample Name', ...dataframe_keys.split(','))
.toCollection()
})
//TODO: Add to config
// Downloadable CSV header template; column names define the expected upload format.
let template = new Blob(["Art_calc,Jahr_calc,Sortiment_calc,BKR_Nr,BKR_Bezeichnung,Anbaugebiet_Nr,Anbaugebiete,Standort_calc,Bundeslandkuerzel,DATUM,BBCH_VON,BBCH,BBCH_BIS,Termin,Merkmal,MerkmalsBez,StufenNrFak1,StufenNrFak2,Wdh,Sorte,Status,KennNr,Wert"], { type: 'text/csv;charset=utf-8,' });
// Table interpretation mode; only 'long' is assigned in this file.
let mode: 'long'|'wide' = 'long';
// Comma-separated column lists used to derive ISA objects from the table:
// columns identifying a sample, a source, the measurement columns, and the
// source characteristics, respectively.
let sample_key = 'Sorte,StufenNrFak1,StufenNrFak2,Wdh'
let source_key = 'Sorte';
let dataframe_keys = 'DATUM,BBCH_VON,BBCH,BBCH_BIS,Merkmal,Wert'
// NOTE(review): 'characterisitcs' is a typo for 'characteristics'; renaming
// would require touching every use elsewhere in this file.
let characterisitcs = 'Anbaugebiete,Standort_calc,Jahr_calc,Art_calc';
// Number of rows shown in the upload preview.
let previewSize = 5;
/**
 * Rebuild the study's sources, samples and the 'Growth' process from an
 * uploaded data frame. Existing materials and the process sequence are
 * discarded and regenerated.
 * Fixes vs. original: no longer throws when the study lacks a 'Growth'
 * protocol (`growth_protocol.parameters` on undefined) or when a protocol
 * parameter carries no comments (`comments[0].value` on an empty array).
 * @param df dataframe-js DataFrame built from the uploaded table.
 */
function writeStudy(df){
    // One group per unique source-key combination -> one ISA source each.
    let groupedDF = df
        .groupBy(...source_key.split(','))
        .toCollection();
    // Reset materials and processes before regenerating them.
    study.materials.sources = [];
    study.materials.samples = [];
    study.processSequence = [];
    groupedDF.forEach((source_group) => {
        // Source name is the (single) group key; characteristics come from
        // the group's first row.
        let source = Schema.getSource(
            Object.values(source_group.groupKey)[0],
            source_group.group.select(...characterisitcs.split(',')).toCollection()[0]
        );
        study.materials.sources = [...study.materials.sources, source];
        // One sample per unique sample-key combination within the source
        // group; the sample name joins the key values with '-'.
        source_group.group.groupBy(...sample_key.split(',')).toCollection().forEach((sample_group) => {
            let sample = Schema.getObjectFromSchema('sample');
            sample.name = Object.values(sample_group.groupKey).join('-');
            sample.derivesFrom = [source];
            study.materials.samples = [...study.materials.samples, sample];
        });
    });
    // Build the single 'Growth' process: sources in, samples out, parameter
    // values taken from the protocol parameters' first comments.
    let growth_protocol = study.protocols.find(protocol => protocol.name === 'Growth');
    let process = Schema.getObjectFromSchema('process');
    process.inputs = study.materials.sources;
    process.executesProtocol = growth_protocol;
    // Guard: without a 'Growth' protocol fall back to an empty parameter list
    // instead of throwing.
    process.parameterValues = (growth_protocol ? growth_protocol.parameters : []).map(parameter => {
        // First comment on the protocol parameter holds the entered value.
        let value = parameter.comments.length > 0 ? parameter.comments[0].value : undefined;
        let process_parameter_value = Schema.getObjectFromSchema('process_parameter_value');
        process_parameter_value.category = parameter;
        process_parameter_value.value = value;
        return process_parameter_value;
    });
    process.outputs = study.materials.samples;
    study.processSequence = [...study.processSequence, process];
}
/**
 * Rebuild the study's single phenotyping assay from the uploaded data frame.
 * NOTE(review): this span is a diff-rendering artifact — `datafileContent`
 * and `assay` are referenced below but not defined within this span, and the
 * trailing four statements sit outside the closing brace. A coherent copy of
 * the function's body appears elsewhere in the file.
 * @param df dataframe-js DataFrame built from the uploaded table.
 */
function writeAssay(df) {
// Fresh 'Phenotyping' protocol; keep only the first existing protocol.
let phenotyping_protocol = Schema.getObjectFromSchema('protocol');
phenotyping_protocol.name = 'Phenotyping';
study.protocols = [study.protocols[0], phenotyping_protocol];
// Raw data file carrying the serialised table as its 'Content' comment.
let dataFile = Schema.getObjectFromSchema('data');
dataFile.name = 'phenotyping-results.csv';
dataFile.type = 'Raw Data File';
dataFile.comments = [datafileContent];
assay.dataFiles = [...assay.dataFiles, dataFile];
// Single phenotyping process: samples in, data file out.
assay.processSequence = [];
let process = Schema.getObjectFromSchema('process');
process.inputs = study.materials.samples;
process.executesProtocol = phenotyping_protocol;
process.outputs = [dataFile];
assay.processSequence = [...assay.processSequence, process];
study.assays = [...study.assays, assay];
}
study.assays = [];
let assay = Schema.getObjectFromSchema('assay');
assay.materials.samples = study.materials.samples;
assay.dataFiles = [];
// Approve handler for the table preview: build a DataFrame from the uploaded
// rows/columns, regenerate the study and assay sections, then write the study
// back into the ISA store at `jsonPath`.
// NOTE(review): the double `event.detail.detail` looks intentional (nested
// CustomEvent forwarding) — confirm against the dispatching component.
function handleApprove(event) {
    const payload = event.detail.detail;
    const frame = new DataFrame(payload.rows, payload.columns);
    writeStudy(frame);
    writeAssay(frame);
    set($isaObj, jsonPath, study);
    $isaObj = $isaObj; // self-assignment triggers Svelte store reactivity
}
// NOTE(review): this span is the tail of writeAssay split off by the page's
// diff rendering — `df`, `assay` and `phenotyping_protocol` are defined in
// the function head that appears earlier in the file.
// Serialise the measurement columns (keyed by a derived 'Sample Name') into
// a 'Content' comment that will be attached to the data file.
let datafileContent = Schema.getObjectFromSchema('comment');
datafileContent.name = 'Content';
datafileContent.value = JSON.stringify(
df.select(...sample_key.split(','), ...dataframe_keys.split(','))
.withColumn(
'Sample Name',
row => sample_key.split(',').map(key => row.get(key)).join('-')
)
.select('Sample Name', ...dataframe_keys.split(','))
.toCollection()
);
// Raw data file carrying the serialised table as its 'Content' comment.
let dataFile = Schema.getObjectFromSchema('data');
dataFile.name = 'phenotyping-results.csv';
dataFile.type = 'Raw Data File';
dataFile.comments = [datafileContent];
assay.dataFiles = [...assay.dataFiles, dataFile];
// Single phenotyping process: samples in, data file out.
assay.processSequence = [];
let process = Schema.getObjectFromSchema('process');
process.inputs = study.materials.samples;
process.executesProtocol = phenotyping_protocol;
process.outputs = [dataFile];
assay.processSequence = [...assay.processSequence, process];
study.assays = [...study.assays, assay];
}
// Approve handler for the table preview: build a DataFrame from the uploaded
// rows/columns, regenerate the study and assay sections, then write the study
// back into the ISA store at `jsonPath`.
// NOTE(review): the double `event.detail.detail` looks intentional (nested
// CustomEvent forwarding) — confirm against the dispatching component.
function handleApprove(event) {
    const payload = event.detail.detail;
    const frame = new DataFrame(payload.rows, payload.columns);
    writeStudy(frame);
    writeAssay(frame);
    set($isaObj, jsonPath, study);
    $isaObj = $isaObj; // self-assignment triggers Svelte store reactivity
}
</script>

<section>
Expand Down

0 comments on commit f5bb4f9

Please sign in to comment.