-clinical_data <- "select * from syn43278088"
+clinical_data <- "select * from syn43278088" # query when the table already contains just the releasable patients
ref_map <- "https://raw.githubusercontent.com/nf-osi/nf-metadata-dictionary/main/mappings/cBioPortal.yaml"
cbp_add_clinical(clinical_data, ref_map)
diff --git a/pkgdown.yml b/pkgdown.yml
index db0ed8d..a9cf8d1 100644
--- a/pkgdown.yml
+++ b/pkgdown.yml
@@ -7,4 +7,4 @@ articles:
bringing-portal-data-to-other-platforms-cbioportal: bringing-portal-data-to-other-platforms-cbioportal.html
revalidation-workflows: revalidation-workflows.html
survey-public-files: survey-public-files.html
-last_built: 2025-02-04T18:52Z
+last_built: 2025-02-13T19:55Z
diff --git a/search.json b/search.json
index 046ffb1..0f3d2a2 100644
--- a/search.json
+++ b/search.json
@@ -1 +1 @@
-[{"path":[]},{"path":"/CODE_OF_CONDUCT.html","id":"our-pledge","dir":"","previous_headings":"","what":"Our Pledge","title":"Contributor Covenant Code of Conduct","text":"members, contributors, leaders pledge make participation community harassment-free experience everyone, regardless age, body size, visible invisible disability, ethnicity, sex characteristics, gender identity expression, level experience, education, socio-economic status, nationality, personal appearance, race, religion, sexual identity orientation. pledge act interact ways contribute open, welcoming, diverse, inclusive, healthy community.","code":""},{"path":"/CODE_OF_CONDUCT.html","id":"our-standards","dir":"","previous_headings":"","what":"Our Standards","title":"Contributor Covenant Code of Conduct","text":"Examples behavior contributes positive environment community include: Demonstrating empathy kindness toward people respectful differing opinions, viewpoints, experiences Giving gracefully accepting constructive feedback Accepting responsibility apologizing affected mistakes, learning experience Focusing best just us individuals, overall community Examples unacceptable behavior include: use sexualized language imagery, sexual attention advances kind Trolling, insulting derogatory comments, personal political attacks Public private harassment Publishing others’ private information, physical email address, without explicit permission conduct reasonably considered inappropriate professional setting","code":""},{"path":"/CODE_OF_CONDUCT.html","id":"enforcement-responsibilities","dir":"","previous_headings":"","what":"Enforcement Responsibilities","title":"Contributor Covenant Code of Conduct","text":"Community leaders responsible clarifying enforcing standards acceptable behavior take appropriate fair corrective action response behavior deem inappropriate, threatening, offensive, harmful. 
Community leaders right responsibility remove, edit, reject comments, commits, code, wiki edits, issues, contributions aligned Code Conduct, communicate reasons moderation decisions appropriate.","code":""},{"path":"/CODE_OF_CONDUCT.html","id":"scope","dir":"","previous_headings":"","what":"Scope","title":"Contributor Covenant Code of Conduct","text":"Code Conduct applies within community spaces, also applies individual officially representing community public spaces. Examples representing community include using official e-mail address, posting via official social media account, acting appointed representative online offline event.","code":""},{"path":"/CODE_OF_CONDUCT.html","id":"enforcement","dir":"","previous_headings":"","what":"Enforcement","title":"Contributor Covenant Code of Conduct","text":"Instances abusive, harassing, otherwise unacceptable behavior may reported community leaders responsible enforcement [INSERT CONTACT METHOD]. complaints reviewed investigated promptly fairly. community leaders obligated respect privacy security reporter incident.","code":""},{"path":"/CODE_OF_CONDUCT.html","id":"enforcement-guidelines","dir":"","previous_headings":"","what":"Enforcement Guidelines","title":"Contributor Covenant Code of Conduct","text":"Community leaders follow Community Impact Guidelines determining consequences action deem violation Code Conduct:","code":""},{"path":"/CODE_OF_CONDUCT.html","id":"id_1-correction","dir":"","previous_headings":"Enforcement Guidelines","what":"1. Correction","title":"Contributor Covenant Code of Conduct","text":"Community Impact: Use inappropriate language behavior deemed unprofessional unwelcome community. Consequence: private, written warning community leaders, providing clarity around nature violation explanation behavior inappropriate. public apology may requested.","code":""},{"path":"/CODE_OF_CONDUCT.html","id":"id_2-warning","dir":"","previous_headings":"Enforcement Guidelines","what":"2. 
Warning","title":"Contributor Covenant Code of Conduct","text":"Community Impact: violation single incident series actions. Consequence: warning consequences continued behavior. interaction people involved, including unsolicited interaction enforcing Code Conduct, specified period time. includes avoiding interactions community spaces well external channels like social media. Violating terms may lead temporary permanent ban.","code":""},{"path":"/CODE_OF_CONDUCT.html","id":"id_3-temporary-ban","dir":"","previous_headings":"Enforcement Guidelines","what":"3. Temporary Ban","title":"Contributor Covenant Code of Conduct","text":"Community Impact: serious violation community standards, including sustained inappropriate behavior. Consequence: temporary ban sort interaction public communication community specified period time. public private interaction people involved, including unsolicited interaction enforcing Code Conduct, allowed period. Violating terms may lead permanent ban.","code":""},{"path":"/CODE_OF_CONDUCT.html","id":"id_4-permanent-ban","dir":"","previous_headings":"Enforcement Guidelines","what":"4. Permanent Ban","title":"Contributor Covenant Code of Conduct","text":"Community Impact: Demonstrating pattern violation community standards, including sustained inappropriate behavior, harassment individual, aggression toward disparagement classes individuals. Consequence: permanent ban sort public interaction within community.","code":""},{"path":"/CODE_OF_CONDUCT.html","id":"attribution","dir":"","previous_headings":"","what":"Attribution","title":"Contributor Covenant Code of Conduct","text":"Code Conduct adapted Contributor Covenant, version 2.0, available https://www.contributor-covenant.org/version/2/0/ code_of_conduct.html. Community Impact Guidelines inspired Mozilla’s code conduct enforcement ladder. answers common questions code conduct, see FAQ https://www.contributor-covenant.org/faq. 
Translations available https:// www.contributor-covenant.org/translations.","code":""},{"path":"/LICENSE.html","id":null,"dir":"","previous_headings":"","what":"MIT License","title":"MIT License","text":"Copyright (c) 2021 Robert Allaway Permission hereby granted, free charge, person obtaining copy software associated documentation files (“Software”), deal Software without restriction, including without limitation rights use, copy, modify, merge, publish, distribute, sublicense, /sell copies Software, permit persons Software furnished , subject following conditions: copyright notice permission notice shall included copies substantial portions Software. SOFTWARE PROVIDED “”, WITHOUT WARRANTY KIND, EXPRESS IMPLIED, INCLUDING LIMITED WARRANTIES MERCHANTABILITY, FITNESS PARTICULAR PURPOSE NONINFRINGEMENT. EVENT SHALL AUTHORS COPYRIGHT HOLDERS LIABLE CLAIM, DAMAGES LIABILITY, WHETHER ACTION CONTRACT, TORT OTHERWISE, ARISING , CONNECTION SOFTWARE USE DEALINGS SOFTWARE.","code":""},{"path":"/articles/annotate-data-intro.html","id":"intro","dir":"Articles","previous_headings":"","what":"Intro","title":"Introduction to utils for annotating data","text":"introduces annotation utilities typical examples. expected useful starting point using nfportalutils annotation tasks, followed specialized vignetted annotating NF processed data needed.","code":""},{"path":"/articles/annotate-data-intro.html","id":"set-up","dir":"Articles","previous_headings":"Intro","what":"Set up","title":"Introduction to utils for annotating data","text":"","code":"library(nfportalutils) syn_login() # Change this to a dev project you have access to PROJECT <- \"syn26462036\""},{"path":"/articles/annotate-data-intro.html","id":"set-annotations-on-a-single-file","dir":"Articles","previous_headings":"Intro","what":"Set annotations on a single file","title":"Introduction to utils for annotating data","text":"Create demo entity. set_annotations can used add new annotations correct existing annotation entity. 
wraps Python client make intuitive pass R list annotations . , add another annotation correct favorites “chocolate”. returned data shows unchanged foo, updated favorites, new n. Cleanup.","code":"synapseclient <- reticulate::import(\"synapseclient\") # Create an entity with some initial annotations entity <- synapseclient$Folder(\"Demo Entity\", parent = PROJECT, annotations = list(foo = \"bar\", favorites = c(\"raindrops\", \"whiskers\"))) entity <- .syn$store(entity) set_annotations(id = entity$properties$id, annotations = list(favorites = \"chocolate\", n = 7L)) .syn$delete(entity)"},{"path":"/articles/annotate-data-intro.html","id":"annotate-in-batch-using-a-manifest","dir":"Articles","previous_headings":"Intro","what":"Annotate in batch using a manifest","title":"Introduction to utils for annotating data","text":"better way use set_annotations set entities, usually files. First create multiple entities need annotated corrected batch. Create example manifest. Note: Another way includes reading shematic csv manifest entityIds Filenames. Apply: Cleanup.","code":"objs <- make_folder(parent = PARENT_TEST_PROJECT, folders = c(\"mock_file_1\", \"mock_file_2\", \"mock_file_3\")) ids <- sapply(objs, function(x) x$properties$id) manifest <- data.table( entityId = ids, assay = \"drugScreen\", experimentalTimepoint = c(1L, 3L, 7L), experimentalTimepointUnit = \"days\", cellType = list(c(\"schwann\", \"macrophage\"), c(\"schwann\", \"macrophage\"), c(\"schwann\", \"macrophage\")) ) manifest annotate_with_manifest(manifest) for (id in ids) .syn$delete(id)"},{"path":[]},{"path":"/articles/annotate-nf-processed-data.html","id":"purpose","dir":"Articles","previous_headings":"Intro","what":"Purpose","title":"Annotating nextflow processed data","text":"vignette documents -practice usage annotation utils nf-processed data files. outputs : 1. metadata manifest processed dataset can validated schematic submission. 2. Provenance meta. 
Typically, inspected/validated submitting Synapse final followup step. Examples can run READ access processed outputs, requires DOWNLOAD access local copy input samplesheet. actually apply annotations course requires EDIT access.","code":""},{"path":"/articles/annotate-nf-processed-data.html","id":"general-idea","dir":"Articles","previous_headings":"Intro","what":"General idea","title":"Annotating nextflow processed data","text":"nextflow workflow generates different types outputs along steps workflow (see figure ). steps/stops, products can collected packaged “level 2, 3, 4” datasets. example, .bam/bai outputs SAMtools represent “level 2” semi-processed dataset dataType AlignedReads. Ideally, simply like point main folder containing processed output files get back list manifests represent useful dataset products workflow. (“useful”, selectable data product encoded annotation workflow.) manifests can used annotate files well creation Synapse Datasets.","code":""},{"path":"/articles/annotate-nf-processed-data.html","id":"set-up","dir":"Articles","previous_headings":"","what":"Set up","title":"Annotating nextflow processed data","text":"First load nfportalutils package log . recommended default usage syn_login use without directly passing credentials. Instead, available SYNAPSE_AUTH_TOKEN environment variable token stored therein.","code":"library(nfportalutils) library(data.table) syn_login()"},{"path":"/articles/annotate-nf-processed-data.html","id":"steps","dir":"Articles","previous_headings":"Set up","what":"Steps","title":"Annotating nextflow processed data","text":"general annotation workflow steps : 1. Parse input samplesheet. 2. Get basic context processed outputs workflow run. none indexed-back output files annotations, first construct initial info sample, caller, etc. good ol’ folder hierarchy file names. 3. Now sample workflow context least, link input-output data appropriately, check sample correspondence, get format expected downstream. 4. 
Transfer meta input output processed files (important individualID, basic individual attributes, assay). 5. Set annotations processed data type based workflow default rules. potential issues noted: input files missing incorrect annotations, processed files missing incorrect annotations. sample ids information updated original raw input files, data must reannotated rerunning pipeline. Anything deviates relatively standard workflow run, leading changes locations naming outputs, might yield poor results require manual composition steps. Standard organization naming files important.","code":""},{"path":[]},{"path":"/articles/annotate-nf-processed-data.html","id":"what-does-output-look-like","dir":"Articles","previous_headings":"nf-rnaseq","what":"What does output look like?","title":"Annotating nextflow processed data","text":"Use ?map_sample_output_rnaseq see outputs handled parameter output. note depending workflow run data indexed back Synapse, actual output availability may differ. projects, bam/bai files may even indexed back Synapse. illustrative example, workflow outputs include featureCounts:","code":"syn_out <- \"syn57382909\" fileview <- \"syn11601495\" o <- map_sample_output_rnaseq(syn_out, fileview) # check outputs only names(o)"},{"path":"/articles/annotate-nf-processed-data.html","id":"what-does-input-look-like","dir":"Articles","previous_headings":"nf-rnaseq","what":"What does input look like?","title":"Annotating nextflow processed data","text":"Like output, input just another index files actually samplesheet used workflow know files process. Samplesheets public placed pipeline_info directory part workflow (time). IMPORTANT: samplesheet needs standard enough parse correctly, .e. extract valid file Synapse ids first fastq. use helper parse samplesheets two workflows (RNA-seq Sarek), function best handle slight variations samplesheet formats. examples work vs : ✔ OK. Excerpt real samplesheet syn51525432. ✔ OK. Excerpt real samplesheet syn63172939. ✖ . 
Adapted real samplesheet syn63172939. give error “x6” valid Synapse ID. manually corrected samplesheet provided.","code":"#> sample fastq_1 fastq_2 strandedness #> 1 JH-2-019-DB5EH-C461C syn15261791 syn15261900 auto #> 2 JH-2-007-B14BB-AG2A6 syn15261974 syn15262033 auto #> 3 JH-2-009-518B9-77BH3 syn15262157 syn15262216 auto #> subject sex status sample lane fastq1 fastq2 #> 1 JHU002 XY 1 JHU002-043 JHU002-043-Lane-1 syn://syn22091879 NA #> 2 JHU002 XY 1 JHU002-048 JHU002-048-Lane-1 syn://syn22091925 NA #> 3 JHU023 XY 1 JHU023-044 JHU023-044-Lane-1 syn://syn22091973 NA #> datasetId projectId output_destination_id Germline Somatic #> 1 syn29783617 syn11638893 syn29429576 Y NA #> 2 syn29783617 syn11638893 syn29429576 Y NA #> 3 syn29783617 syn11638893 syn29429576 Y NA #> sample single_end #> 1 patient10tumor1_T1 0 #> 2 patient10tumor2_T1 0 #> 3 patient10tumor3_T1 0 #> fastq_1 #> 1 s3://some-tower-bucket/syn40134517/x6/SL106309_1.fastq.gz #> 2 s3://some-tower-bucket/syn40134517/syn7989846/SL106310_1.fastq.gz #> 3 s3://some-tower-bucket/syn40134517/syn7989852/SL106311_1.fastq.gz #> fastq_2 #> 1 s3://some-tower-bucket/syn40134517/syn7989839/SL106309_2.fastq.gz #> 2 s3://some-tower-bucket/syn40134517/syn7989847/SL106310_2.fastq.gz #> 3 s3://some-tower-bucket/syn40134517/syn7989856/SL106311_2.fastq.gz #> strandedness #> 1 auto #> 2 auto #> 3 auto"},{"path":"/articles/annotate-nf-processed-data.html","id":"connecting-input-and-output-to-automate-filled-manifests","dir":"Articles","previous_headings":"nf-rnaseq","what":"Connecting input and output to automate filled manifests","title":"Annotating nextflow processed data","text":"contrast previous example, run example output directory types outputs ’re looking nf-rnaseq workflow. used rest demo. (Review source code processed_meta see steps encapsulated.) 
Generate manifests inspect example result:","code":"samplesheet <- \"syn51408030\" syn_out <- \"syn51476810\" fileview <- \"syn11601481\" wf_link <- \"https://nf-co.re/rnaseq/3.11.2/output#star-and-salmon\" input <- map_sample_input_ss(samplesheet) # Alternatively, use a local file if not on Synapse: # input <- map_sample_input_ss(\"~/work/samplesheet.csv\") output <- map_sample_output_rnaseq(syn_out, fileview) names(output) meta <- processed_meta(input, output, workflow_link = wf_link) head(meta$manifests$SAMtools)"},{"path":"/articles/annotate-nf-processed-data.html","id":"submit-manifest","dir":"Articles","previous_headings":"nf-rnaseq","what":"Submit manifest","title":"Annotating nextflow processed data","text":"Manifests can submitted schematic-compatible using annotate_with_manifest shown .","code":"mannifest_1 <- meta$manifests$SAMtools annotate_with_manifest(manifest_1)"},{"path":"/articles/annotate-nf-processed-data.html","id":"add-provenance","dir":"Articles","previous_headings":"nf-rnaseq","what":"Add provenance","title":"Annotating nextflow processed data","text":"Provenance basically annotation, though treated somewhat differently Synapse. result meta object, something called sample_io can provided add_activity_batch add provenance. 
“Workflow” provides general name activity, “workflow link” provides persistent reference version/part workflow, others can follow link get details.","code":"sample_io <- meta$sample_io prov <- add_activity_batch(sample_io$output_id, sample_io$workflow, wf_link, sample_io$input_id)"},{"path":"/articles/annotate-nf-processed-data.html","id":"create-dataset","dir":"Articles","previous_headings":"nf-rnaseq","what":"Create dataset","title":"Annotating nextflow processed data","text":"create Synapse Dataset:","code":"items <- manifest_1$entityId project <- \"your-dev-project-synid\" dataset_1 <- new_dataset(name = \"STAR Salmon Gene Expression Quantification from RNA-seq\", parent = project, items = items, dry_run = FALSE)"},{"path":"/articles/annotate-nf-processed-data.html","id":"nf-sarek","dir":"Articles","previous_headings":"","what":"nf-sarek","title":"Annotating nextflow processed data","text":"difference usage map_sample_output_sarek Step 2.","code":"samplesheet <- \"syn38793905\" # samplesheet can be stored on Synapse or locally syn_out <- \"syn27650634\" fileview <- \"syn13363852\" i <- map_sample_input_ss(samplesheet) #1 o <- map_sample_output_sarek(syn_out, fileview) #2 sarek_meta <- processed_meta(i, o, workflow_link = \"test\") # View first manifest sarek_meta$manifests$Strelka2"},{"path":"/articles/annotate-nf-processed-data.html","id":"add-provenance-1","dir":"Articles","previous_headings":"nf-sarek","what":"Add provenance","title":"Annotating nextflow processed data","text":"Use manifest add provenance. 
provenance, rest workflow manifest submission creating datasets like nf-rnaseq example.","code":"wf_link <- c(FreeBayes = \"https://nf-co.re/sarek/3.2.3/output#freebayes\", Mutect2 = \"https://nf-co.re/sarek/3.2.3/output#gatk-mutect2\", Strelka2 = \"https://nf-co.re/sarek/3.2.3/output#strelka2\") sample_io <- sarek_meta$sample_io add_activity_batch(sample_io$output_id, sample_io$workflow, wf_link[sample_io$workflow], sample_io$input_id)"},{"path":"/articles/bringing-portal-data-to-other-platforms-cbioportal.html","id":"special-acknowledgments","dir":"Articles","previous_headings":"","what":"Special acknowledgments","title":"Bringing Portal Data to Other Platforms: cBioPortal","text":"Functionality demonstrated vignette benefited greatly code originally written hhunterzinck.","code":""},{"path":"/articles/bringing-portal-data-to-other-platforms-cbioportal.html","id":"intro","dir":"Articles","previous_headings":"","what":"Intro","title":"Bringing Portal Data to Other Platforms: cBioPortal","text":"describes package Synapse processed data cBioPortal study dataset. cBioPortal study contains one data types, see cBioPortal docs. current API covers creating cBioPortal study subset data types relevant NF workflow (data types). design inspired feel somewhat like working R package usethis, data types can added study package interactively. Though checking depending data type, final validation official cBioPortal validation tools/scripts still run. Breaking changes possible API still development.","code":""},{"path":"/articles/bringing-portal-data-to-other-platforms-cbioportal.html","id":"set-up","dir":"Articles","previous_headings":"","what":"Set up","title":"Bringing Portal Data to Other Platforms: cBioPortal","text":"First load nfportalutils package log . recommended default usage syn_login use without directly passing credentials. 
Instead, available SYNAPSE_AUTH_TOKEN environment variable token stored therein.","code":"library(nfportalutils) syn_login()"},{"path":"/articles/bringing-portal-data-to-other-platforms-cbioportal.html","id":"create-a-new-study-dataset","dir":"Articles","previous_headings":"","what":"Create a new study dataset","title":"Bringing Portal Data to Other Platforms: cBioPortal","text":"First create study dataset “package” can put together data. study dataset combines multiple data types – clinical, gene expression, gene variants, etc.","code":"cbp_new_study(cancer_study_identifier = \"npst_nfosi_ntap_2022\", name = \"Plexiform Neurofibroma and Neurofibroma (Pratilas 2022)\", citation = \"TBD\")"},{"path":"/articles/bringing-portal-data-to-other-platforms-cbioportal.html","id":"add-data-types-to-study","dir":"Articles","previous_headings":"","what":"Add data types to study","title":"Bringing Portal Data to Other Platforms: cBioPortal","text":"Data types can easily added order using cbp_add* functions. functions download data files create meta . Note : run working directory set study dataset directory set ensure consistent metadata. Defaults known NF-OSI processed data outputs. defaults don’t apply changes scenario, take look lower-level utils make_meta_* edit files manually . Data types can vary much additional work needed remapping, reformatting, custom sanity checks, etc.","code":""},{"path":"/articles/bringing-portal-data-to-other-platforms-cbioportal.html","id":"add-mutations-data","dir":"Articles","previous_headings":"Add data types to study","what":"Add mutations data","title":"Bringing Portal Data to Other Platforms: cBioPortal","text":"maf_data references final merged maf output file NF-OSI processing pipeline OK public release. 
data file type requires modifications except renaming.","code":"maf_data <- \"syn36553188\" add_cbp_maf(maf_data)"},{"path":"/articles/bringing-portal-data-to-other-platforms-cbioportal.html","id":"add-copy-number-alterations-cna-data","dir":"Articles","previous_headings":"Add data types to study","what":"Add copy number alterations (CNA) data","title":"Bringing Portal Data to Other Platforms: cBioPortal","text":"cna_data expected .seg file Synapse.","code":"cna_data <- \"syn********\" cbp_add_cna(cna_data)"},{"path":"/articles/bringing-portal-data-to-other-platforms-cbioportal.html","id":"add-expression-data","dir":"Articles","previous_headings":"Add data types to study","what":"Add expression data","title":"Bringing Portal Data to Other Platforms: cBioPortal","text":"expression_data expected .txt called gene_tpm.tsv file Synapse. NF-OSI default includes including raw expression data well, called gene_counts.tsv, can omitted. NF-OSI outputs somewhat modified translation required headers.","code":"mrna_data <- \"syn********\" mrna_data_raw <- \"syn********\" cbp_add_expression(mrna_data, expression_data_raw = mrna_data_raw)"},{"path":"/articles/bringing-portal-data-to-other-platforms-cbioportal.html","id":"add-clinical-data","dir":"Articles","previous_headings":"Add data types to study","what":"Add clinical data","title":"Bringing Portal Data to Other Platforms: cBioPortal","text":"clinical_data prepared clinical data table already subsetted released study, pass query can used subsetting using full clinical database table. example, full clinical cohort comprises patients 1-50, study dataset consists available releasable data patients 1-20 expression data data patients 15-20 cna data. , clinical_data can smaller table just 1-30, can original table pass suitable additional filter, e.g. release = 'batch1'. Clinical data requires mapping consistent public datasets possible. ref_map defines mapping clinical variables NF-OSI data dictionary cBioPortal’s. 
variables mapping exported cBioPortal. Follow link inspect default file format used. Clinical data added last overall sample checks work. example, expression data patients 1-20 cna data patients 15-20, can informatively warn missing/mismatches.","code":"clinical_data <- \"select * from syn43278088\" ref_map <- \"https://raw.githubusercontent.com/nf-osi/nf-metadata-dictionary/main/mappings/cBioPortal.yaml\" cbp_add_clinical(clinical_data, ref_map)"},{"path":"/articles/bringing-portal-data-to-other-platforms-cbioportal.html","id":"validation","dir":"Articles","previous_headings":"","what":"Validation","title":"Bringing Portal Data to Other Platforms: cBioPortal","text":"additional steps generating case lists validation done outside package cBioPortal backend, portal may specific configurations (genomic reference) validate . See general docs dataset validation. public portal, suggested step using public server given . Assuming present working directory ~/datahub/public study folder called npst_nfosi_ntap_2022 placed , mount dataset container run validation like: html report list issues data types help corrections needed.","code":"STUDY=npst_nfosi_ntap_2022 sudo docker run --rm -v $(pwd):/datahub cbioportal/cbioportal:5.4.7 validateStudies.py -d /datahub -l $STUDY -u http://cbioportal.org -html /datahub/$STUDY/html_report"},{"path":"/articles/revalidation-workflows.html","id":"basics-with-schematic-api-service","dir":"Articles","previous_headings":"","what":"Basics with Schematic API service","title":"Revalidation workflows","text":"Schematic API works dataset folders currently. Find dataset folder. validate metadata, manifest must reconstituted. Type ?manifest_generate read docs. seen params , need know data_type validate . data_type “Component” schematic data model (exact term depends data model). feeling lucky, try infer_data_type. Go google_sheet download .csv. Excel chosen, open spreadsheet editor resave file .csv. validate. 
Make corrections .csv according validation laundry list. Submit corrected manifest via DCA.","code":"my_dataset <- \"syn25386362\" inferred <- infer_data_type(my_dataset) inferred #> $result #> [1] \"GenomicsAssayTemplate\" #> #> $notes #> [1] \"\" data_type <- inferred$result manifest_generate(data_type, dataset_id = my_dataset, schema_url = \"https://raw.githubusercontent.com/nf-osi/nf-metadata-dictionary/main/NF.jsonld\", output_format = \"google_sheet\") # otherwise excel #> Manifest generated as Googlesheet(s) #> [[1]] #> [1] \"https://docs.google.com/spreadsheets/d/11ymlnESzn7XhHS3vHzlRFsDPhafJJehlhuCL9HgICt8\" manifest_validate(data_type = data_type, file_name = \"GenomicsAssayTemplate - Sheet1.csv\") #> $errors #> $errors[[1]] #> $errors[[1]][[1]] #> [1] \"2\" #> #> $errors[[1]][[2]] #> [1] \"assay\" #> #> $errors[[1]][[3]] #> [1] \"'' is not one of ['NIH Toolbox', 'STR profile', 'traction force microscopy', 'massively parallel reporter assay', 'gait measurement', 'conventional MRI', 'functional MRI', 'immunoassay', 'contextual conditioning behavior assay', 'genotyping', 'DNA optical mapping', 'NOMe-seq', 'Social Responsiveness Scale', 'targeted exome sequencing', '2D AlamarBlue fluorescence', 'TMT quantitation', 'liquid chromatography-electrochemical detection', 'whole genome sequencing', 'Riccardi and Ablon scales', 'cell\" #> #> $errors[[1]][[4]] #> [1] \"\" #> #> #> $errors[[2]] #> $errors[[2]][[1]] #> [1] \"2\" #> #> $errors[[2]][[2]] #> [1] \"specimenID\" #> #> $errors[[2]][[3]] #> [1] \"'' is too short\" #> #> $errors[[2]][[4]] #> [1] \"\" #> #> #> $errors[[3]] #> $errors[[3]][[1]] #> [1] \"2\" #> #> $errors[[3]][[2]] #> [1] \"libraryStrand\" #> #> $errors[[3]][[3]] #> [1] \"'' is not one of ['FirstStranded', 'Unstranded', 'Not Applicable', 'SecondStranded']\" #> #> $errors[[3]][[4]] #> [1] \"\" #> #> #> $errors[[4]] #> $errors[[4]][[1]] #> [1] \"2\" #> #> $errors[[4]][[2]] #> [1] \"tumorType\" #> #> $errors[[4]][[3]] #> [1] \"'' is not one of 
['Anaplastic Ganglioglioma', 'Anaplastic Astrocytoma', 'Nodular Neurofibroma', 'Meningioma', 'Fibrosarcoma', 'Localized Neurofibroma', 'Glioblastoma', 'Malignant Peripheral Nerve Sheath Tumor', 'Anaplastic Pleomorphic Xanthoastrocytoma', 'Atypical Neurofibroma', 'tumor', 'Colorectal Adenocarcinoma', 'Recurrent MPNST', 'Pilocytic Astrocytoma', 'Ganglioglioma', 'Optic Pathway Glioma', 'Neurofibroma', 'Necrotic Neoplasm', 'Glioma', 'Teratoma', 'Cutaneous Neurofibroma', 'Fibromatosi\" #> #> $errors[[4]][[4]] #> [1] \"\" #> #> #> $errors[[5]] #> $errors[[5]][[1]] #> [1] \"2\" #> #> $errors[[5]][[2]] #> [1] \"libraryPreparationMethod\" #> #> $errors[[5]][[3]] #> [1] \"'' is not one of ['CEL-seq', 'NEBNext mRNA Library Prep Reagent Set for Illumina', '10x', 'GTAC@WUSTL in-house prep', 'KAPA mRNA HyperPrep Kit', 'TruSeq', 'unknown', 'KAPA HyperPrep Kit PCR-free', 'Illumina TruSeq DNA Nano', 'TruSeq standard total RNA library kit', 'QuantSeq FWD V2 with UDI', 'Drop-Seq', 'KAPA RNA HyperPrep Kit with RiboErase (HMR)', 'Smart-seq4', 'IDT xGen Exome Research Panel', 'Smart-seq2', 'Omni-ATAC']\" #> #> $errors[[5]][[4]] #> [1] \"\" #> #> #> $errors[[6]] #> $errors[[6]][[1]] #> [1] \"2\" #> #> $errors[[6]][[2]] #> [1] \"individualID\" #> #> $errors[[6]][[3]] #> [1] \"'' is too short\" #> #> $errors[[6]][[4]] #> [1] \"\" #> #> #> $errors[[7]] #> $errors[[7]][[1]] #> [1] \"2\" #> #> $errors[[7]][[2]] #> [1] \"platform\" #> #> $errors[[7]][[3]] #> [1] \"'' is not one of ['Illumina Genome Analyzer IIx', 'Illumina HiSeq X', 'Perlegen 300Karray', 'Vevo 3100 Imaging System', 'Illumina MouseWG-6 v2.0 expression beadchip', 'Illumina Infinium MethylationEPIC BeadChip v2.0 (935k)', 'Vectra H1 3D Imaging System', 'Nanostring Counter', 'Illumina Infinium MethylationEPIC BeadChip v1.0 (850k)', 'Illumina HumanOmniExpress-24 v1.0 BeadChip', 'Illumina HumanOmni1-Quadv1.0', 'LifeViz Micro System', 'LI-COR Odyssey CLx', 'Illumina HumanMethylation450', 'Illumin\" #> #> $errors[[7]][[4]] #> [1] \"\" 
#> #> #> $errors[[8]] #> $errors[[8]][[1]] #> [1] \"2\" #> #> $errors[[8]][[2]] #> [1] \"specimenPreparationMethod\" #> #> $errors[[8]][[3]] #> [1] \"'' is not one of ['FFPE', 'OCT', 'RNAlater', 'Viably frozen', 'Fresh collected', 'Cryopreserved', 'formalin-fixed', 'Flash frozen', 'ethanol']\" #> #> $errors[[8]][[4]] #> [1] \"\" #> #> #> $errors[[9]] #> $errors[[9]][[1]] #> [1] \"2\" #> #> $errors[[9]][[2]] #> [1] \"species\" #> #> $errors[[9]][[3]] #> [1] \"'' is not one of ['Rattus norvegicus', 'Gallus gallus', 'Danio rerio', 'Sus scrofa', 'Drosophila melanogaster', 'Oryctolagus cuniculus', 'Pan troglodytes', 'Rhesus macaque', 'Mus musculus (humanized)', 'Homo sapiens', 'Mus musculus']\" #> #> $errors[[9]][[4]] #> [1] \"\" #> #> #> $errors[[10]] #> $errors[[10]][[1]] #> [1] \"3\" #> #> $errors[[10]][[2]] #> [1] \"assay\" #> #> $errors[[10]][[3]] #> [1] \"'' is not one of ['NIH Toolbox', 'STR profile', 'traction force microscopy', 'massively parallel reporter assay', 'gait measurement', 'conventional MRI', 'functional MRI', 'immunoassay', 'contextual conditioning behavior assay', 'genotyping', 'DNA optical mapping', 'NOMe-seq', 'Social Responsiveness Scale', 'targeted exome sequencing', '2D AlamarBlue fluorescence', 'TMT quantitation', 'liquid chromatography-electrochemical detection', 'whole genome sequencing', 'Riccardi and Ablon scales', 'cell\" #> #> $errors[[10]][[4]] #> [1] \"\" #> #> #> $errors[[11]] #> $errors[[11]][[1]] #> [1] \"3\" #> #> $errors[[11]][[2]] #> [1] \"specimenID\" #> #> $errors[[11]][[3]] #> [1] \"'' is too short\" #> #> $errors[[11]][[4]] #> [1] \"\" #> #> #> $errors[[12]] #> $errors[[12]][[1]] #> [1] \"3\" #> #> $errors[[12]][[2]] #> [1] \"libraryStrand\" #> #> $errors[[12]][[3]] #> [1] \"'' is not one of ['FirstStranded', 'Unstranded', 'Not Applicable', 'SecondStranded']\" #> #> $errors[[12]][[4]] #> [1] \"\" #> #> #> $errors[[13]] #> $errors[[13]][[1]] #> [1] \"3\" #> #> $errors[[13]][[2]] #> [1] \"tumorType\" #> #> $errors[[13]][[3]] 
#> [1] \"'' is not one of ['Anaplastic Ganglioglioma', 'Anaplastic Astrocytoma', 'Nodular Neurofibroma', 'Meningioma', 'Fibrosarcoma', 'Localized Neurofibroma', 'Glioblastoma', 'Malignant Peripheral Nerve Sheath Tumor', 'Anaplastic Pleomorphic Xanthoastrocytoma', 'Atypical Neurofibroma', 'tumor', 'Colorectal Adenocarcinoma', 'Recurrent MPNST', 'Pilocytic Astrocytoma', 'Ganglioglioma', 'Optic Pathway Glioma', 'Neurofibroma', 'Necrotic Neoplasm', 'Glioma', 'Teratoma', 'Cutaneous Neurofibroma', 'Fibromatosi\" #> #> $errors[[13]][[4]] #> [1] \"\" #> #> #> $errors[[14]] #> $errors[[14]][[1]] #> [1] \"3\" #> #> $errors[[14]][[2]] #> [1] \"libraryPreparationMethod\" #> #> $errors[[14]][[3]] #> [1] \"'' is not one of ['CEL-seq', 'NEBNext mRNA Library Prep Reagent Set for Illumina', '10x', 'GTAC@WUSTL in-house prep', 'KAPA mRNA HyperPrep Kit', 'TruSeq', 'unknown', 'KAPA HyperPrep Kit PCR-free', 'Illumina TruSeq DNA Nano', 'TruSeq standard total RNA library kit', 'QuantSeq FWD V2 with UDI', 'Drop-Seq', 'KAPA RNA HyperPrep Kit with RiboErase (HMR)', 'Smart-seq4', 'IDT xGen Exome Research Panel', 'Smart-seq2', 'Omni-ATAC']\" #> #> $errors[[14]][[4]] #> [1] \"\" #> #> #> $errors[[15]] #> $errors[[15]][[1]] #> [1] \"3\" #> #> $errors[[15]][[2]] #> [1] \"individualID\" #> #> $errors[[15]][[3]] #> [1] \"'' is too short\" #> #> $errors[[15]][[4]] #> [1] \"\" #> #> #> $errors[[16]] #> $errors[[16]][[1]] #> [1] \"3\" #> #> $errors[[16]][[2]] #> [1] \"platform\" #> #> $errors[[16]][[3]] #> [1] \"'' is not one of ['Illumina Genome Analyzer IIx', 'Illumina HiSeq X', 'Perlegen 300Karray', 'Vevo 3100 Imaging System', 'Illumina MouseWG-6 v2.0 expression beadchip', 'Illumina Infinium MethylationEPIC BeadChip v2.0 (935k)', 'Vectra H1 3D Imaging System', 'Nanostring Counter', 'Illumina Infinium MethylationEPIC BeadChip v1.0 (850k)', 'Illumina HumanOmniExpress-24 v1.0 BeadChip', 'Illumina HumanOmni1-Quadv1.0', 'LifeViz Micro System', 'LI-COR Odyssey CLx', 'Illumina HumanMethylation450', 
'Illumin\" #> #> $errors[[16]][[4]] #> [1] \"\" #> #> #> $errors[[17]] #> $errors[[17]][[1]] #> [1] \"3\" #> #> $errors[[17]][[2]] #> [1] \"specimenPreparationMethod\" #> #> $errors[[17]][[3]] #> [1] \"'' is not one of ['FFPE', 'OCT', 'RNAlater', 'Viably frozen', 'Fresh collected', 'Cryopreserved', 'formalin-fixed', 'Flash frozen', 'ethanol']\" #> #> $errors[[17]][[4]] #> [1] \"\" #> #> #> $errors[[18]] #> $errors[[18]][[1]] #> [1] \"3\" #> #> $errors[[18]][[2]] #> [1] \"species\" #> #> $errors[[18]][[3]] #> [1] \"'' is not one of ['Rattus norvegicus', 'Gallus gallus', 'Danio rerio', 'Sus scrofa', 'Drosophila melanogaster', 'Oryctolagus cuniculus', 'Pan troglodytes', 'Rhesus macaque', 'Mus musculus (humanized)', 'Homo sapiens', 'Mus musculus']\" #> #> $errors[[18]][[4]] #> [1] \"\" #> #> #> #> $warnings #> $warnings[[1]] #> $warnings[[1]][[1]] #> [1] \"2\" #> #> $warnings[[1]][[2]] #> [1] \"age\" #> #> $warnings[[1]][[3]] #> [1] \"On row 2 the attribute age does not contain the proper value type num.\" #> #> $warnings[[1]][[4]] #> [1] \"\" #> #> #> $warnings[[2]] #> $warnings[[2]][[1]] #> [1] \"3\" #> #> $warnings[[2]][[2]] #> [1] \"age\" #> #> $warnings[[2]][[3]] #> [1] \"On row 3 the attribute age does not contain the proper value type num.\" #> #> $warnings[[2]][[4]] #> [1] \"\" #> #> #> $warnings[[3]] #> $warnings[[3]][[1]] #> $warnings[[3]][[1]][[1]] #> [1] \"2\" #> #> $warnings[[3]][[1]][[2]] #> [1] \"3\" #> #> #> $warnings[[3]][[2]] #> [1] \"readPair\" #> #> $warnings[[3]][[3]] #> [1] \"readPair values in rows ['2', '3'] are out of the specified range.\" #> #> $warnings[[3]][[4]] #> $warnings[[3]][[4]][[1]] #> [1] \"\" #> #> #> #> $warnings[[4]] #> $warnings[[4]][[1]] #> [1] \"2\" #> #> $warnings[[4]][[2]] #> [1] \"readLength\" #> #> $warnings[[4]][[3]] #> [1] \"On row 2 the attribute readLength does not contain the proper value type int.\" #> #> $warnings[[4]][[4]] #> [1] \"\" #> #> #> $warnings[[5]] #> $warnings[[5]][[1]] #> [1] \"3\" #> #> 
$warnings[[5]][[2]] #> [1] \"readLength\" #> #> $warnings[[5]][[3]] #> [1] \"On row 3 the attribute readLength does not contain the proper value type int.\" #> #> $warnings[[5]][[4]] #> [1] \"\" #> #> #> $warnings[[6]] #> $warnings[[6]][[1]] #> [1] \"2\" #> #> $warnings[[6]][[2]] #> [1] \"readDepth\" #> #> $warnings[[6]][[3]] #> [1] \"On row 2 the attribute readDepth does not contain the proper value type int.\" #> #> $warnings[[6]][[4]] #> [1] \"\" #> #> #> $warnings[[7]] #> $warnings[[7]][[1]] #> [1] \"3\" #> #> $warnings[[7]][[2]] #> [1] \"readDepth\" #> #> $warnings[[7]][[3]] #> [1] \"On row 3 the attribute readDepth does not contain the proper value type int.\" #> #> $warnings[[7]][[4]] #> [1] \"\" #> #> #> $warnings[[8]] #> $warnings[[8]][[1]] #> [1] \"2\" #> #> $warnings[[8]][[2]] #> [1] \"experimentalTimepoint\" #> #> $warnings[[8]][[3]] #> [1] \"On row 2 the attribute experimentalTimepoint does not contain the proper value type num.\" #> #> $warnings[[8]][[4]] #> [1] \"\" #> #> #> $warnings[[9]] #> $warnings[[9]][[1]] #> [1] \"3\" #> #> $warnings[[9]][[2]] #> [1] \"experimentalTimepoint\" #> #> $warnings[[9]][[3]] #> [1] \"On row 3 the attribute experimentalTimepoint does not contain the proper value type num.\" #> #> $warnings[[9]][[4]] #> [1] \"\""},{"path":"/articles/revalidation-workflows.html","id":"alternative-with-dataset-entity","dir":"Articles","previous_headings":"","what":"Alternative with dataset entity","title":"Revalidation workflows","text":"working dataset entities extra checks, slightly different workflow can applied instead. Dataset entities may complicated can combine files different times places (batches). Generate manifest Synapse dataset entity. need use remanifest; understand differences, read docs running ?remanifest. use first item demo. reproducibility original manifest “manifest_rd1.csv” vignettes folder. Run precheck. Make copy “manifest_rd1.csv”, e.g. “manifest_rd1_corrected.csv” use precheck notes help make corrections. 
Validate “manifest_rd1_corrected.csv” using schematic service (, run since general validation output already shown). Make corrections needed. Finally submit corrected manifest.","code":"datasets <- list_project_datasets(project_id = \"syn4939902\", type = \"dataset\") new_datasets <- Filter(function(d) as.Date(d$createdOn) > as.Date(\"2023-12-01\"), datasets) # or filter by name test <- new_datasets[[1]]$id remanifest(test, file = \"manifest_rd1.csv\") #> ✔️ Saved manifest as manifest_rd1.csv precheck_manifest(\"manifest_rd1.csv\") #> ❌ Multiple components detected in a single manifest: 'RNASeqTemplate', 'GenomicsAssayTemplate', ''. This can happen when files were annotated at different eras. #> Suggestions: 1) Split up the manifest because schematic can only validate one type at a time. 2) Harmonize the components if this is sensible. #> For example, RNASeqTemplate is an alias for GenomicsAssayTemplate #> ❌ Blank value '' for Component detected. This can happen because files were annotated before 2022, when Component was introduced for most DCCs. #> ❌ The pattern of these attribute names suggest duplicates: '...1', '...44', '...46'. This may happen when metadata is supplemented programmatically with a data-type mismatch #> ⚠️ An attribute `Uuid` is present and should preferably be removed. See issue # . #> ⚠️ An attribute `eTag` is present and should preferably be removed. #> ℹ️ Custom attributes (not documented in data model) were found: 'entityId', '...1', 'Uuid', 'DNA_ID', 'RNA_ID', 'tissue', 'bodyPart', 'parentSpecimenId', 'eTag', '...44', '...46', 'accessTeam', 'accessType', 'sciDataRelease', 'specimenIdSource', 'timePointUnit', 'transplantationDonorTissue', 'transplantationDonorSpecies'. In general, custom attributes added by the researcher to help with data management are fine. #> Just check that they are not PHI or added by mistake. 
If they are deemed generally useful or important enough, they can also be documented officially in the data model for others to reference."},{"path":"/articles/survey-public-files.html","id":"intro","dir":"Articles","previous_headings":"","what":"Intro","title":"Surveying public files in the portal","text":"quick makes use functions survey files portal access.","code":""},{"path":"/articles/survey-public-files.html","id":"set-up","dir":"Articles","previous_headings":"","what":"Set up","title":"Surveying public files in the portal","text":"usual setup:","code":"library(nfportalutils) syn_login()"},{"path":"/articles/survey-public-files.html","id":"files-downloadable-for-synapse-registered-users","dir":"Articles","previous_headings":"","what":"Files downloadable for Synapse registered users","title":"Surveying public files in the portal","text":"talking “public” files, usually means files viewable downloadable Synapse users. group id 273948, use query : Breakdown absolute number proportions:","code":"public_access <- summarize_file_access(principal_id = 273948, \"DOWNLOAD\", \"syn16858331\") public_access public_access[, .(n_files = sum(N)), by = access][, .(access, n_files, proportion = n_files / sum(n_files))]"},{"path":"/articles/survey-public-files.html","id":"some-nuances","dir":"Articles","previous_headings":"","what":"Some Nuances","title":"Surveying public files in the portal","text":"nice see file access restrictions different points time, note underlying API returns access control info present. file may inherited benefactor earlier point, becomes benefactor later (.e. granular access control), queries based past state likely work. Don’t try something like:","code":"public_access_q3_2022 <- summarize_file_access(principal_id = 273948, \"DOWNLOAD\", \"syn16858331.47\")"},{"path":"/authors.html","id":null,"dir":"","previous_headings":"","what":"Authors","title":"Authors and Citation","text":"Robert Allaway. Author, maintainer. Anh Nguyet Vu. 
Author.","code":""},{"path":"/authors.html","id":"citation","dir":"","previous_headings":"","what":"Citation","title":"Authors and Citation","text":"Allaway R, Vu (2025). nfportalutils: NF Portal Utilities. R package version 0.9600, https://github.com/nf-osi/nfportalutils.","code":"@Manual{, title = {nfportalutils: NF Portal Utilities}, author = {Robert Allaway and Anh Nguyet Vu}, year = {2025}, note = {R package version 0.9600}, url = {https://github.com/nf-osi/nfportalutils}, }"},{"path":"/index.html","id":"nfportalutils","dir":"","previous_headings":"","what":"NF Portal Utilities","title":"NF Portal Utilities","text":"goal nfportalutils provide convenience functions project (meta)data management NF-OSI data portal scope. Currently, develop branch default package install docs refer code branch. package interops Python synapse client via reticulate. set (see #Installation). Outside tested versions, may issues. tested versions : - Python Synapse Client == 4.3.1 - reticulate == 1.39.0","code":""},{"path":"/index.html","id":"docs","dir":"","previous_headings":"","what":"Docs","title":"NF Portal Utilities","text":"👉 Package documentation!","code":""},{"path":"/index.html","id":"installation","dir":"","previous_headings":"","what":"Installation","title":"NF Portal Utilities","text":"presumes already set R RStudio. Install reticulate following guide https://rstudio.github.io/reticulate/index.html#installation. Install synapseclient==4.3.1 following https://rstudio.github.io/reticulate/articles/python_packages.html, use default environment “r-reticulate”. Lastly, install nfportalutils. startup, nfportalutils imports synapseclient default “r-reticulate”. regular users: remotes::install_github(\"nf-osi/nfportalutils\", build_vignettes = TRUE) remotes::install_github(\"nf-osi/nfportalutils@-branch\", build_vignettes = TRUE) Clone repo, checkout desired development branch. Make sure package repo root working directory, R run devtools::install(). 
Browse vignettes: browseVignettes(\"nfportalutils\").","code":""},{"path":"/index.html","id":"for-users","dir":"","previous_headings":"","what":"For Users","title":"NF Portal Utilities","text":"View function reference docs site Reference. alternative viewing vignettes Articles docs site download pkg install load e.g. vignette(\"annotate-nf-processed-data\", package = \"nfportalutils\") view.","code":""},{"path":[]},{"path":"/index.html","id":"general-picture","dir":"","previous_headings":"For Contributors","what":"General picture","title":"NF Portal Utilities","text":", default development happens develop. side branch called develop-synapser interop tries transition synapser. However, current coexistence makes development bit tricky consult first trying new developments .","code":""},{"path":"/index.html","id":"contrib-workflow","dir":"","previous_headings":"For Contributors","what":"Contrib workflow","title":"NF Portal Utilities","text":"Branch develop make changes Run devtools::check(vignettes = FALSE) early often, definitely submitting PR Make pull request develop; run R-CMD-CHECK pkgdown Request reviewer checks pass Reviewer requests changes merges","code":""},{"path":"/index.html","id":"local-development-tips","dir":"","previous_headings":"For Contributors","what":"Local development tips","title":"NF Portal Utilities","text":"vignettes need precomputed. , run devtools::check(vignettes = FALSE) early often. minimal, address ERRORS WARNINGS. Yes, lot NOTES need resolved. custom indexing adding/updating functions, edit _pkgdown.yml. Preview pkg docs site locally pkgdown::build_site().","code":""},{"path":"/index.html","id":"code-of-conduct","dir":"","previous_headings":"","what":"Code of Conduct","title":"NF Portal Utilities","text":"Please note nfportalutils project released Contributor Code Conduct. 
contributing project, agree abide terms.","code":""},{"path":"/reference/add_activity.html","id":null,"dir":"Reference","previous_headings":"","what":"Add activity to entity — add_activity","title":"Add activity to entity — add_activity","text":"Util adding activity info file entity. See also https://help.synapse.org/docs/Provenance.1972470373.html","code":""},{"path":"/reference/add_activity.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Add activity to entity — add_activity","text":"","code":"add_activity(entity, act_name, act_executed, used_inputs)"},{"path":"/reference/add_activity.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Add activity to entity — add_activity","text":"entity Synapse entity id. act_name Name activity. act_executed Reference activity executed (URL preferred). used_inputs Vector inputs act, e.g. syn ids, links data sources, etc.","code":""},{"path":"/reference/add_activity_batch.html","id":null,"dir":"Reference","previous_headings":"","what":"Add activity to multiple entities — add_activity_batch","title":"Add activity to multiple entities — add_activity_batch","text":"Wrapper provenance function little work expand many--many mappings create records entity, activity, input.","code":""},{"path":"/reference/add_activity_batch.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Add activity to multiple entities — add_activity_batch","text":"","code":"add_activity_batch(entities, act_name, act_executed, used_inputs)"},{"path":"/reference/add_activity_batch.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Add activity to multiple entities — add_activity_batch","text":"entities Vector list entities. act_name Vector list activity name. act_executed Vector list reference activity executed. 
used_inputs Vector list inputs entity.","code":""},{"path":"/reference/add_default_fileview.html","id":null,"dir":"Reference","previous_headings":"","what":"Create default project fileview — add_default_fileview","title":"Create default project fileview — add_default_fileview","text":"Create default project fileview","code":""},{"path":"/reference/add_default_fileview.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Create default project fileview — add_default_fileview","text":"","code":"add_default_fileview(project)"},{"path":"/reference/add_default_fileview.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Create default project fileview — add_default_fileview","text":"project project entity.","code":""},{"path":"/reference/add_default_folders.html","id":null,"dir":"Reference","previous_headings":"","what":"Create default folders — add_default_folders","title":"Create default folders — add_default_folders","text":"convenience wrapper around make_folder NF defaults.","code":""},{"path":"/reference/add_default_folders.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Create default folders — add_default_folders","text":"","code":"add_default_folders( project, folders = c(\"Analysis\", \"Milestone Reports\", \"Raw Data\") )"},{"path":"/reference/add_default_folders.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Create default folders — add_default_folders","text":"project project Synapse id object. 
folders Names standard set folders.","code":""},{"path":"/reference/add_default_wiki.html","id":null,"dir":"Reference","previous_headings":"","what":"Add default wiki — add_default_wiki","title":"Add default wiki — add_default_wiki","text":"Add default wiki project creation use retrofit projects creators created wiki.","code":""},{"path":"/reference/add_default_wiki.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Add default wiki — add_default_wiki","text":"","code":"add_default_wiki( project, name, pi, lead, funder, initiative, abstract, institution )"},{"path":"/reference/add_default_wiki.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Add default wiki — add_default_wiki","text":"project Synapse id project. name Name project/study. pi Name principal investigator. lead Name(s) project lead/data coordinator, comma-sep multiple, e.g. \"Jane Doe, John Doe\". funder funding agency. relevant funder team made admin. initiative Title funding initiative, e.g. \"Young Investigator Award\". abstract Project abstract/description. institution Affiliated institution(s), semicolon-sep multiple, e.g. 
\"Stanford University; University California, San Francisco\".","code":""},{"path":"/reference/add_new_study_meta.html","id":null,"dir":"Reference","previous_headings":"","what":"Helpers ———————————————————————# Add meta for new studies as annotations — add_new_study_meta","title":"Helpers ———————————————————————# Add meta for new studies as annotations — add_new_study_meta","text":"Put selected metadata Synapse annotations study project entity.","code":""},{"path":"/reference/add_new_study_meta.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Helpers ———————————————————————# Add meta for new studies as annotations — add_new_study_meta","text":"","code":"add_new_study_meta(id, study_meta)"},{"path":"/reference/add_new_study_meta.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Helpers ———————————————————————# Add meta for new studies as annotations — add_new_study_meta","text":"id Id container representing study, usually Synapse project. 
study_meta Study meta list.","code":""},{"path":"/reference/add_people_from_table.html","id":null,"dir":"Reference","previous_headings":"","what":"Update the People table from a source Table or View column — add_people_from_table","title":"Update the People table from a source Table or View column — add_people_from_table","text":"Update People table source Table View column","code":""},{"path":"/reference/add_people_from_table.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Update the People table from a source Table or View column — add_people_from_table","text":"","code":"add_people_from_table( people_table_id, people_column, source_table_id, source_column, dry_run = T )"},{"path":"/reference/add_people_from_table.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Update the People table from a source Table or View column — add_people_from_table","text":"people_table_id synapse id table used referencing people. people_column Column name within people table contains relevant people values. source_table_id synapse id source table. source_column Column name within source table contains relevant source values. dry_run Default = TRUE Skips upload annotations unless set FALSE.","code":""},{"path":"/reference/add_people_from_table.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Update the People table from a source Table or View column — add_people_from_table","text":"dry_run == T, prints preview updated people table, otherwise uploads updates.","code":""},{"path":"/reference/add_publication_from_pubmed.html","id":null,"dir":"Reference","previous_headings":"","what":"Add a publication to the publication table — add_publication_from_pubmed","title":"Add a publication to the publication table — add_publication_from_pubmed","text":"Requires publication PubMed auto-derive metadata authors, title, etc. 
contrast, disease_focus manifestation need supplemented curator. study_id used get consistent studyName fundingAgency study table without manual input.","code":""},{"path":"/reference/add_publication_from_pubmed.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Add a publication to the publication table — add_publication_from_pubmed","text":"","code":"add_publication_from_pubmed( pmid, study_id, disease_focus, manifestation, publication_table_id, study_table_id, dry_run = T )"},{"path":"/reference/add_publication_from_pubmed.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Add a publication to the publication table — add_publication_from_pubmed","text":"pmid PubMed ID (PMCID) publication added. study_id Synapse id(s) study associated publication. disease_focus disease focus(s) associated publication. manifestation manifestation(s) associated publication. publication_table_id Synapse id portal publication table. Must write access. study_table_id Synapse id portal study table. Need read access. dry_run Default = TRUE. 
Skips upload table instead prints formatted publication metadata.","code":""},{"path":"/reference/add_publication_from_pubmed.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Add a publication to the publication table — add_publication_from_pubmed","text":"dry_run == T, returns publication metadata added.","code":""},{"path":"/reference/add_publication_from_pubmed.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Add a publication to the publication table — add_publication_from_pubmed","text":"","code":"if (FALSE) { # \\dontrun{ add_publication_from_pubmed( pmid = \"33574490\", study_id = \"syn2343195\", disease_focus = c(\"Neurofibromatosis\"), manifestation = c(\"Meningioma\"), publication_table_id = \"syn16857542\", study_table_id = \"syn16787123\") } # }"},{"path":"/reference/add_publication_from_unpaywall.html","id":null,"dir":"Reference","previous_headings":"","what":"Add a publication or preprint to the publication table via the Unpaywall API. — add_publication_from_unpaywall","title":"Add a publication or preprint to the publication table via the Unpaywall API. — add_publication_from_unpaywall","text":"Add publication publication table. Publication must unpaywall database retrieve info. parameter-provided metadata (e.g. \"studyName\"), function must JSON-formatted character vector destination Synapse column \"STRING_LIST\" format. Currently, function evaluate schema, must checked manually.","code":""},{"path":"/reference/add_publication_from_unpaywall.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Add a publication or preprint to the publication table via the Unpaywall API. 
— add_publication_from_unpaywall","text":"","code":"add_publication_from_unpaywall( publication_table_id, email_address, doi, is_preprint = F, preprint_server = NULL, study_name, study_id, funding_agency, disease_focus, manifestation, dry_run = T )"},{"path":"/reference/add_publication_from_unpaywall.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Add a publication or preprint to the publication table via the Unpaywall API. — add_publication_from_unpaywall","text":"publication_table_id synapse id portal publication table. Must write access. email_address valid email address. used request metadata Unpaywall API. Please change example real email address help Unpaywall accurately track usage. doi DOI preprint added. is_preprint Default = FALSE. Set TRUE DOI preprint. preprint_server Provide preprint server name. Must one 'bioRxiv', 'medRxiv', 'chemRxiv', 'arXiv' study_name name(s) study associated publication. study_id synapse id(s) study associated publication. funding_agency funding agency(s) associated publication. disease_focus disease focus(s) associated publication. manifestation manifestation(s) associated publication. dry_run Default = TRUE. Skips upload table instead prints formatted publication metadata.","code":""},{"path":"/reference/add_publication_from_unpaywall.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Add a publication or preprint to the publication table via the Unpaywall API. — add_publication_from_unpaywall","text":"dry_run == T, returns publication metadata added.","code":""},{"path":"/reference/add_publication_from_unpaywall.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Add a publication or preprint to the publication table via the Unpaywall API. 
— add_publication_from_unpaywall","text":"","code":"if (FALSE) { # \\dontrun{ add_publication_from_unpaywall(publication_table_id = 'syn16857542', email_address = 'foo@bar.com', doi = '10.1074/jbc.RA120.014960', study_name = c(toJSON(\"Synodos NF2\")), study_id = c(toJSON(\"syn2343195\")), funding_agency = c(toJSON(\"CTF\")), disease_focus = \"Neurofibromatosis 2\", manifestation = c(toJSON(\"Meningioma\")), dry_run = T) } # }"},{"path":"/reference/add_publications_from_file.html","id":null,"dir":"Reference","previous_headings":"","what":"Add a batch of publications from spreadsheet — add_publications_from_file","title":"Add a batch of publications from spreadsheet — add_publications_from_file","text":"Add batch publications spreadsheet","code":""},{"path":"/reference/add_publications_from_file.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Add a batch of publications from spreadsheet — add_publications_from_file","text":"","code":"add_publications_from_file( file, publication_table_id, study_table_id, list_sep = \"|\", dry_run = TRUE )"},{"path":"/reference/add_publications_from_file.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Add a batch of publications from spreadsheet — add_publications_from_file","text":"file Spreadsheet (.csv/.tsv) pubs add pmid, studyId, diseaseFocus, manifestation. pmid one per row unique, rest can list_sep vals. publication_table_id Synapse id portal publication table. Must write access. study_table_id Synapse id portal study table. Need read access. list_sep Delimiter character used separate list columns. dry_run Default = TRUE. 
Skips upload table instead prints formatted publication metadata.","code":""},{"path":"/reference/add_study_summary.html","id":null,"dir":"Reference","previous_headings":"","what":"Add studyId-summary key-value only — add_study_summary","title":"Add studyId-summary key-value only — add_study_summary","text":"Add studyId-summary key-value ","code":""},{"path":"/reference/add_study_summary.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Add studyId-summary key-value only — add_study_summary","text":"","code":"add_study_summary(study_id, summary, table_id = \"syn16787123\")"},{"path":"/reference/add_to_collection.html","id":null,"dir":"Reference","previous_headings":"","what":"Add to collection — add_to_collection","title":"Add to collection — add_to_collection","text":"Add items(s) existing collection, using item(s)' current (latest) version. datasets, items files. dataset collections, items datasets. item attempting added happens already collection, might lead version conflicts, update rejected unless force true.","code":""},{"path":"/reference/add_to_collection.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Add to collection — add_to_collection","text":"","code":"add_to_collection(collection_id, items, check_items = FALSE, force = FALSE)"},{"path":"/reference/add_to_collection.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Add to collection — add_to_collection","text":"collection_id Collection id. items Character vector one dataset entity ids add. check_items Whether check ids really appropriate item types remove non-appropriate item types help avoid Synapse errors (default FALSE cases items curated, using check slower). force items currently collection different version, items force-added using current version? 
safe default FALSE ensure updates intentional.","code":""},{"path":"/reference/add_to_collection.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Add to collection — add_to_collection","text":"implemented lower-level REST API Python client (v2.7) yet implement dataset collection class methods (dataset relevant methods like add_item method available). Thus, generic enough handle datasets dataset collections expected used dataset collections given dataset method provided.","code":""},{"path":"/reference/add_to_scope.html","id":null,"dir":"Reference","previous_headings":"","what":"Add to scope — add_to_scope","title":"Add to scope — add_to_scope","text":"Convenience function add container view scope.","code":""},{"path":"/reference/add_to_scope.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Add to scope — add_to_scope","text":"","code":"add_to_scope(view_id, container_id)"},{"path":"/reference/add_to_scope.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Add to scope — add_to_scope","text":"view_id Id view container_id Id container add.","code":""},{"path":"/reference/adjust_list_length.html","id":null,"dir":"Reference","previous_headings":"","what":"Adjust schema max list length based on hint — adjust_list_length","title":"Adjust schema max list length based on hint — adjust_list_length","text":"Adjust schema max list length based hint","code":""},{"path":"/reference/adjust_list_length.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Adjust schema max list length based on hint — adjust_list_length","text":"","code":"adjust_list_length(view, hint, check_byte_budget = TRUE)"},{"path":"/reference/adjust_string_size.html","id":null,"dir":"Reference","previous_headings":"","what":"Adjust schema max size based on hint — adjust_string_size","title":"Adjust schema max size based on hint — 
adjust_string_size","text":"Note: STRING cols, hard limit 1000 char size, though 250 using LARGETEXT officially recommended, possibly breakpoint just create different column type instead increasing size.","code":""},{"path":"/reference/adjust_string_size.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Adjust schema max size based on hint — adjust_string_size","text":"","code":"adjust_string_size(view, hint, check_byte_budget = TRUE)"},{"path":"/reference/adjust_view.html","id":null,"dir":"Reference","previous_headings":"","what":"Adjust view — adjust_view","title":"Adjust view — adjust_view","text":"view schema data mismatched, view built therefore queried. common causes (likely accounting 98%+ instances combined) max size/length issues. iteratively update schema based exactly whatever server saying * sizing list length issues, view functional querying works. However, issue one , fail handlers problems currently implemented.","code":""},{"path":"/reference/adjust_view.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Adjust view — adjust_view","text":"","code":"adjust_view(view, max_tries = 5L, check_byte_budget = TRUE)"},{"path":"/reference/adjust_view.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Adjust view — adjust_view","text":"view Synapse view id. max_tries Max number tries. Vast majority views accumulated 1-2 bad data mutations, default 5 reasonable. 
check_byte_budget Check lead exceeding table budget.","code":""},{"path":"/reference/adjust_view.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Adjust view — adjust_view","text":"*Note: Fixes applied iteratively server currently surfaces repair recommendations.","code":""},{"path":"/reference/annotate_aligned_reads.html","id":null,"dir":"Reference","previous_headings":"","what":"Annotate processed aligned reads — annotate_aligned_reads","title":"Annotate processed aligned reads — annotate_aligned_reads","text":"Given manifest, annotate data aligned reads data. Returns \"partial\" manifest, can adjusted needed, e.g. add additional comments batch info.","code":""},{"path":"/reference/annotate_aligned_reads.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Annotate processed aligned reads — annotate_aligned_reads","text":"","code":"annotate_aligned_reads( metadata, workflow_link, genomic_reference = \"GRCh38\", verbose = TRUE )"},{"path":"/reference/annotate_aligned_reads.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Annotate processed aligned reads — annotate_aligned_reads","text":"metadata Metadata table build upon. workflow_link Workflow link specific part workflow generating data. genomic_reference aligned reads, genomic reference meta present; defaults GRCh38. 
verbose Give verbose reports happening.","code":""},{"path":"/reference/annotate_called_variants.html","id":null,"dir":"Reference","previous_headings":"","what":"Annotate somatic or germline variants output — annotate_called_variants","title":"Annotate somatic or germline variants output — annotate_called_variants","text":"Given manifest, annotate data variant data (vcf maf).","code":""},{"path":"/reference/annotate_called_variants.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Annotate somatic or germline variants output — annotate_called_variants","text":"","code":"annotate_called_variants(metadata, workflow_link, verbose = TRUE)"},{"path":"/reference/annotate_called_variants.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Annotate somatic or germline variants output — annotate_called_variants","text":"metadata Metadata table build upon. workflow_link Workflow link specific part workflow generating data. verbose Give verbose reports happening.","code":""},{"path":"/reference/annotate_called_variants.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Annotate somatic or germline variants output — annotate_called_variants","text":"maf files use template different default values. 
future mafs require significantly different template, factored separate annotation function.","code":""},{"path":"/reference/annotate_processed.html","id":null,"dir":"Reference","previous_headings":"","what":"Annotate processed data — annotate_processed","title":"Annotate processed data — annotate_processed","text":"Annotate processed data type inferred output.","code":""},{"path":"/reference/annotate_processed.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Annotate processed data — annotate_processed","text":"","code":"annotate_processed(metadata, ...)"},{"path":"/reference/annotate_processed.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Annotate processed data — annotate_processed","text":"metadata Metadata. ... parameters pass called annotation function.","code":""},{"path":"/reference/annotate_quantified_expression.html","id":null,"dir":"Reference","previous_headings":"","what":"Annotate quantified expression output — annotate_quantified_expression","title":"Annotate quantified expression output — annotate_quantified_expression","text":"Given manifest, annotate data level 3 processed expression data, using defaults star_salmon processing. Returns \"partial\" manifest, can adjusted needed, e.g. add additional comments batch info.","code":""},{"path":"/reference/annotate_quantified_expression.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Annotate quantified expression output — annotate_quantified_expression","text":"","code":"annotate_quantified_expression(metadata, workflow_link, verbose = TRUE)"},{"path":"/reference/annotate_quantified_expression.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Annotate quantified expression output — annotate_quantified_expression","text":"metadata Metadata table build upon. workflow_link Workflow link specific part workflow generating data. 
verbose Give verbose reports happening.","code":""},{"path":"/reference/annotate_reports_sarek.html","id":null,"dir":"Reference","previous_headings":"","what":"Annotate Sarek reports — annotate_reports_sarek","title":"Annotate Sarek reports — annotate_reports_sarek","text":"First runs map_reports_sarek hood.","code":""},{"path":"/reference/annotate_reports_sarek.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Annotate Sarek reports — annotate_reports_sarek","text":"","code":"annotate_reports_sarek(syn_out, project, dry_run)"},{"path":"/reference/annotate_reports_sarek.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Annotate Sarek reports — annotate_reports_sarek","text":"syn_out Reports output folder set scope fileview. project Project put fileview. dry_run Whether submit annotations just return manifest.","code":""},{"path":"/reference/annotate_with_manifest.html","id":null,"dir":"Reference","previous_headings":"","what":"Set annotations from a manifest — annotate_with_manifest","title":"Set annotations from a manifest — annotate_with_manifest","text":"Synapse docs suggest batch annotations fileview. However, often simpler modify set new annotations directly given table just entities (rows) props (cols) want. like schematic works, except without validation (works best power-users know data model well). 
desired defaults taken account, submitting key-values NA empty strings.","code":""},{"path":"/reference/annotate_with_manifest.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Set annotations from a manifest — annotate_with_manifest","text":"","code":"annotate_with_manifest( manifest, ignore_na = TRUE, ignore_blank = TRUE, verbose = FALSE )"},{"path":"/reference/annotate_with_manifest.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Set annotations from a manifest — annotate_with_manifest","text":"manifest data.frame representing manifest. Needs contain entityId (parsed standard manifest.csv, df already contain entityId). ignore_na Whether ignore annotations NA; default TRUE. ignore_blank Whether ignore annotations empty strings; default TRUE. verbose chatty, default FALSE.","code":""},{"path":"/reference/annotate_with_samtools_stats.html","id":null,"dir":"Reference","previous_headings":"","what":"Make annotations from samtools stats — annotate_with_samtools_stats","title":"Make annotations from samtools stats — annotate_with_samtools_stats","text":"Extracts subset samtools stats Regarding selection stats, see Genomic Data Commons (GDC) model AlignedReads","code":""},{"path":"/reference/annotate_with_samtools_stats.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Make annotations from samtools stats — annotate_with_samtools_stats","text":"","code":"annotate_with_samtools_stats(meta, samtools_stats_file = NULL)"},{"path":"/reference/annotate_with_samtools_stats.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Make annotations from samtools stats — annotate_with_samtools_stats","text":"meta Data tool stats added additional meta. 
samtools_stats_file Path file/syn id file samtools stats produced workflow.","code":""},{"path":"/reference/annotation_rule.html","id":null,"dir":"Reference","previous_headings":"","what":"Match to output-specific annotation function — annotation_rule","title":"Match to output-specific annotation function — annotation_rule","text":"encodes logic annotation checks, e.g. DeepVariant Germline variant calling , Mutect2 Somatic variant calling , FreeBayes Strelka2 can applied ; see https://raw.githubusercontent.com/nf-core/sarek/3.4.2//docs/images/sarek_workflow.png","code":""},{"path":"/reference/annotation_rule.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Match to output-specific annotation function — annotation_rule","text":"","code":"annotation_rule(outputFrom, which = c(\"format_as\", \"annotate_as\", \"template\"))"},{"path":"/reference/append_kv.html","id":null,"dir":"Reference","previous_headings":"","what":"Append key-value pair dependent on value being given — append_kv","title":"Append key-value pair dependent on value being given — append_kv","text":"Append key-value pair dependent value given","code":""},{"path":"/reference/append_kv.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Append key-value pair dependent on value being given — append_kv","text":"","code":"append_kv(x, key, value, quote = FALSE)"},{"path":"/reference/append_kv.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Append key-value pair dependent on value being given — append_kv","text":"quote Quote value (needed contains conflict YAML characters).","code":""},{"path":"/reference/as_coll_items.html","id":null,"dir":"Reference","previous_headings":"","what":"Structure as collection items — as_coll_items","title":"Structure as collection items — as_coll_items","text":"Helper taking entity ids create records used dataset items dataset collection items. 
Collection items form list(entityId = id, versionNumber = x).","code":""},{"path":"/reference/as_coll_items.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Structure as collection items — as_coll_items","text":"","code":"as_coll_items(ids, item_version = c(\"abs\", \"stable\"))"},{"path":"/reference/as_coll_items.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Structure as collection items — as_coll_items","text":"ids Ids entities make dataset items. item_version Integer version used items, e.g. 1. Otherwise, \"latest\" \"stable_latest\". See details.","code":""},{"path":"/reference/as_coll_items.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Structure as collection items — as_coll_items","text":"Note: item version, dataset items allow two meanings literal absolute \"latest\" vs. \"stable_latest\", files either one can used mean thing since correct interpretation done hood. See implementation latest_version.","code":""},{"path":"/reference/as_mmd_link.html","id":null,"dir":"Reference","previous_headings":"","what":"Generate notation for mermaid.js link — as_mmd_link","title":"Generate notation for mermaid.js link — as_mmd_link","text":"Generate notation mermaid.js link","code":""},{"path":"/reference/as_mmd_link.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Generate notation for mermaid.js link — as_mmd_link","text":"","code":"as_mmd_link(n1, n2, directional = TRUE, style = \"solid\")"},{"path":"/reference/as_mmd_link.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Generate notation for mermaid.js link — as_mmd_link","text":"n1 Id node one end. n2 Id node end. directional Boolean option diretional arrow n1 n2. 
style Option \"solid\" \"dash\"-style links.","code":""},{"path":"/reference/as_mmd_node.html","id":null,"dir":"Reference","previous_headings":"","what":"Generate notation for mermaid.js nodes — as_mmd_node","title":"Generate notation for mermaid.js nodes — as_mmd_node","text":"Generate notation mermaid.js nodes","code":""},{"path":"/reference/as_mmd_node.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Generate notation for mermaid.js nodes — as_mmd_node","text":"","code":"as_mmd_node(entity, class = c(\"Project\", \"Dataset\", \"Folder\"))"},{"path":"/reference/as_mmd_node.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Generate notation for mermaid.js nodes — as_mmd_node","text":"entity Character vector one entity ids. named, nodes use names instead ids labels. Note entity ids starting \"_\" considered blank nodes treated specially. class Optional, add class node.","code":""},{"path":"/reference/as_table_schema.html","id":null,"dir":"Reference","previous_headings":"","what":"Transform table data to target schema for Synapse storage — as_table_schema","title":"Transform table data to target schema for Synapse storage — as_table_schema","text":"Currently implements list-schema features first later. Check encode data values expectations Synapse target table schema storage. target schema likely existing table, since new tables can take advantage build_table. get compatible list data, JSON encoding optionally list_truncate running length limits. truncation OK, incompatibility resolved updating schema outside . 
Note setting applies list columns, though desirable column-specific.","code":""},{"path":"/reference/as_table_schema.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Transform table data to target schema for Synapse storage — as_table_schema","text":"","code":"as_table_schema(df, schema, list_truncate = FALSE)"},{"path":"/reference/as_table_schema.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Transform table data to target schema for Synapse storage — as_table_schema","text":"df table, .e. data.frame. schema Table schema object Synapse id target table get schema. list_truncate length exceeds schema max list columns, set TRUE allow data truncation, FALSE error (default).","code":""},{"path":"/reference/as_table_schema.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Transform table data to target schema for Synapse storage — as_table_schema","text":"Synapse Table object ready storing.","code":""},{"path":"/reference/assign_study_data_types.html","id":null,"dir":"Reference","previous_headings":"","what":"Summarize data types for the study — assign_study_data_types","title":"Summarize data types for the study — assign_study_data_types","text":"Data types summarized, \"rolled-\", study based child file annotations. Summary values added back overwrites current dataType annotation study. See also related update_study_annotations, study-level annotations rolled child files. 
Note --hood now wraps generalized util summarize_attribute.","code":""},{"path":"/reference/assign_study_data_types.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Summarize data types for the study — assign_study_data_types","text":"","code":"assign_study_data_types( study_table_id, fileview_id, id_col = \"studyId\", attribute = \"dataType\", dry_run = TRUE )"},{"path":"/reference/assign_study_data_types.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Summarize data types for the study — assign_study_data_types","text":"study_table_id Synapse ID reference portal study table. Used get study ids. fileview_id Synapse ID reference portal fileview. id_col Name study id column study_table_id fileview_id. Defaults studyId. attribute Attribute summarized using fileview. Defaults dataType. dry_run Default = TRUE. Whether update well just return list annotation objects.","code":""},{"path":"/reference/assign_study_data_types.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Summarize data types for the study — assign_study_data_types","text":"List annotations objects.","code":""},{"path":"/reference/assign_study_data_types.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Summarize data types for the study — assign_study_data_types","text":"","code":"if (FALSE) { # \\dontrun{ assign_study_data_types(study_table_id = 'syn52694652', fileview_id = 'syn16858331', id_col = 'studyId', attribute = 'dataType', dry_run = T) } # }"},{"path":"/reference/bad_url.html","id":null,"dir":"Reference","previous_headings":"","what":"Helper function to check urls — bad_url","title":"Helper function to check urls — bad_url","text":"Check whether URL(s) return HTTP error, indicating broken link. 
Note uses curl hood, may give timeout errors therefore false positives links valid take long resolve.","code":""},{"path":"/reference/bad_url.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Helper function to check urls — bad_url","text":"","code":"bad_url(url)"},{"path":"/reference/bad_url.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Helper function to check urls — bad_url","text":"url character vector one URLs.","code":""},{"path":"/reference/bad_url.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Helper function to check urls — bad_url","text":"Result vector size values \"bad\" \"ok\".","code":""},{"path":"/reference/bare_syn_id.html","id":null,"dir":"Reference","previous_headings":"","what":"Extract synapse id from URI or other string — bare_syn_id","title":"Extract synapse id from URI or other string — bare_syn_id","text":"Extract synapse id URI string","code":""},{"path":"/reference/bare_syn_id.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Extract synapse id from URI or other string — bare_syn_id","text":"","code":"bare_syn_id(uri)"},{"path":"/reference/bare_syn_id.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Extract synapse id from URI or other string — bare_syn_id","text":"uri URI string containing embedded Synapse id.","code":""},{"path":"/reference/bind_schema.html","id":null,"dir":"Reference","previous_headings":"","what":"Wrapper for JSON schema binding — bind_schema","title":"Wrapper for JSON schema binding — bind_schema","text":"See https://help.synapse.org/docs/JSON-Schemas.3107291536.html","code":""},{"path":"/reference/bind_schema.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Wrapper for JSON schema binding — bind_schema","text":"","code":"bind_schema(id, schema_id, derived_annotations = 
FALSE)"},{"path":"/reference/bind_schema.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Wrapper for JSON schema binding — bind_schema","text":"id Id entity schema bound schema_id Schema id registered Synapse. derived_annotations Whether enabled derived annotations. Default FALSE API default.","code":""},{"path":"/reference/bipartite_mmd_template.html","id":null,"dir":"Reference","previous_headings":"","what":"Simple bipartite representation in mermaid charts — bipartite_mmd_template","title":"Simple bipartite representation in mermaid charts — bipartite_mmd_template","text":"Simple bipartite representation mermaid charts","code":""},{"path":"/reference/bipartite_mmd_template.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Simple bipartite representation in mermaid charts — bipartite_mmd_template","text":"","code":"bipartite_mmd_template( nodeset1, nodeset2, nodeset1_title = \"INPUT\", nodeset2_title = \"OUTPUT\", links = \"\" )"},{"path":"/reference/bipartite_mmd_template.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Simple bipartite representation in mermaid charts — bipartite_mmd_template","text":"nodeset1 Character vector one node ids. named, nodes use names instead ids labels. nodeset2 Character vector one node ids. named, nodes use names instead ids labels. nodeset1_title Title nodeset1. nodeset2_title Title nodeset2. links Optional, character vector edges nodes.","code":""},{"path":"/reference/button_widget.html","id":null,"dir":"Reference","previous_headings":"","what":"Generate button widget for a Synapse wiki — button_widget","title":"Generate button widget for a Synapse wiki — button_widget","text":"Generate markup button widget Synapse project wiki. Refer widget docs https://help.synapse.org/docs/Wikis.1975746682.html#Wikis-WikiWidgets. Buttons created sparingly strategically. 
See remove_button case future regret.","code":""},{"path":"/reference/button_widget.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Generate button widget for a Synapse wiki — button_widget","text":"","code":"button_widget(label, url, align = c(\"None\", \"Left\", \"Right\", \"Center\"))"},{"path":"/reference/button_widget.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Generate button widget for a Synapse wiki — button_widget","text":"label Button label text. url URL button link . align Button alignment, can one \"None\", \"Left\", \"Right\", \"Center\" (defaults \"None\").","code":""},{"path":"/reference/byte_budget.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate byte budget for a schema — byte_budget","title":"Calculate byte budget for a schema — byte_budget","text":"Tables hard width limit 64KB. Given current table schema, math many bytes remain already allocated. Useful austerity measure indeed one large table, cases philosophically principled schema configuration.","code":""},{"path":"/reference/byte_budget.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate byte budget for a schema — byte_budget","text":"","code":"byte_budget(table, schema_cols = NULL, result = \"remaining\")"},{"path":"/reference/byte_budget.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate byte budget for a schema — byte_budget","text":"table Existing Synapse table id table schema object used retrieve column types. schema_cols Optional, also can take list column characteristics; use building scratch columns yet stored. given, table ignored. 
result Return summary number \"remaining\" \"allocated\", return TRUE/FALSE \"within\" budget.","code":""},{"path":"/reference/byte_budget.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Calculate byte budget for a schema — byte_budget","text":"See also: https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/ColumnType.html","code":""},{"path":"/reference/calc_study_dist_dtm.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate study distance based on summary text — calc_study_dist_dtm","title":"Calculate study distance based on summary text — calc_study_dist_dtm","text":"different measures similarity; gives cosine similarity based summary text, converted distance matrix. future, methods may used comparison ensemble.","code":""},{"path":"/reference/calc_study_dist_dtm.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate study distance based on summary text — calc_study_dist_dtm","text":"","code":"calc_study_dist_dtm(studies)"},{"path":"/reference/calc_study_dist_dtm.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate study distance based on summary text — calc_study_dist_dtm","text":"studies data.frame row \"document\"; summary studyId.","code":""},{"path":"/reference/calculate_related_studies.html","id":null,"dir":"Reference","previous_headings":"","what":"Calculate and add related studies to study table — calculate_related_studies","title":"Calculate and add related studies to study table — calculate_related_studies","text":"Processes study summary text identify clusters related studies. Calculates tf-idf values 1 2 length ngrams, clusters studies using ward.D clustering method. 
Adds results annotations studies.","code":""},{"path":"/reference/calculate_related_studies.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Calculate and add related studies to study table — calculate_related_studies","text":"","code":"calculate_related_studies( study_table_id, n_clust = NULL, n_k = NULL, dry_run = TRUE )"},{"path":"/reference/calculate_related_studies.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Calculate and add related studies to study table — calculate_related_studies","text":"study_table_id synapse id portal study table. Must write access. n_clust Target number clusters generate using hierarchical clustering. practice, number total summaries divided 3 good starting point (100 studies = 33 clusters). given n_k ignored. n_k Generate target number closely related studies using k-nearest-neighbors instead; since number desired related studies specified, may preferable using n_clust, gives variable number related studies clusters vary size. Ignored n_clust already given. dry_run Default = TRUE. 
Skips annotating studies instead prints study tibble.","code":""},{"path":"/reference/calculate_related_studies.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Calculate and add related studies to study table — calculate_related_studies","text":"dry_run == T, returns study tibble skips upload.","code":""},{"path":"/reference/calculate_related_studies.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Calculate and add related studies to study table — calculate_related_studies","text":"","code":"if (FALSE) { # \\dontrun{ result1 <- calculate_related_studies(study_table_id = \"syn16787123\", n_clust = 40, dry_run = T) result2 <- calculate_related_studies(study_table_id = \"syn16787123\", n_k = 4, dry_run = T) x <- lapply(result1$relatedStudies, jsonlite::fromJSON) y <- lapply(result2$relatedStudies, jsonlite::fromJSON) # Compare mapply(function(x, y) sum(y %in% x), x, y) } # }"},{"path":"/reference/cbp_add_clinical.html","id":null,"dir":"Reference","previous_headings":"","what":"Export and add clinical data to cBioPortal dataset — cbp_add_clinical","title":"Export and add clinical data to cBioPortal dataset — cbp_add_clinical","text":"run existing dataset package root.","code":""},{"path":"/reference/cbp_add_clinical.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Export and add clinical data to cBioPortal dataset — cbp_add_clinical","text":"","code":"cbp_add_clinical(clinical_data, ref_map, verbose = TRUE)"},{"path":"/reference/cbp_add_clinical.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Export and add clinical data to cBioPortal dataset — cbp_add_clinical","text":"clinical_data Clinical table query. ref_map YAML file specifying mapping (NF) clinical metadata cBioPortal model. See details. 
verbose Whether provide informative messages throughout.","code":""},{"path":"/reference/cbp_add_clinical.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Export and add clinical data to cBioPortal dataset — cbp_add_clinical","text":"Clinical data mapped exported according reference mapping. Also reformatting PATIENT_ID, SAMPLE_ID contain letters, numbers, points, underscores, hyphens; Nextflow processing spaces gets replaced underscores default . check missing samples, final validation via cBioPortal tool still expected .","code":""},{"path":"/reference/cbp_add_cna.html","id":null,"dir":"Reference","previous_headings":"","what":"Export and add CNA (seg) data to cBioPortal dataset — cbp_add_cna","title":"Export and add CNA (seg) data to cBioPortal dataset — cbp_add_cna","text":"run existing dataset package root.","code":""},{"path":"/reference/cbp_add_cna.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Export and add CNA (seg) data to cBioPortal dataset — cbp_add_cna","text":"","code":"cbp_add_cna(cna_data, verbose = TRUE)"},{"path":"/reference/cbp_add_cna.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Export and add CNA (seg) data to cBioPortal dataset — cbp_add_cna","text":"cna_data Synapse id CNA data file, currently handles .seg file. verbose Whether chatty.","code":""},{"path":"/reference/cbp_add_expression.html","id":null,"dir":"Reference","previous_headings":"","what":"Export and add expression data to cBioPortal dataset — cbp_add_expression","title":"Export and add expression data to cBioPortal dataset — cbp_add_expression","text":"run existing dataset package root. Note number different options generated STAR Salmon pipeline. 
cBioPortal confirmed prefer normalized counts gene_tpm.tsv , though used, find helpful also raw counts gene_counts.tsv.","code":""},{"path":"/reference/cbp_add_expression.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Export and add expression data to cBioPortal dataset — cbp_add_expression","text":"","code":"cbp_add_expression(expression_data, expression_data_raw = NULL, verbose = TRUE)"},{"path":"/reference/cbp_add_expression.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Export and add expression data to cBioPortal dataset — cbp_add_expression","text":"expression_data Syn id normalized gene counts results (default TPM). See details. expression_data_raw (Optional) Syn id raw counts results. See details. verbose Whether chatty.","code":""},{"path":"/reference/cbp_add_maf.html","id":null,"dir":"Reference","previous_headings":"","what":"Export and add mutations data to cBioPortal dataset — cbp_add_maf","title":"Export and add mutations data to cBioPortal dataset — cbp_add_maf","text":"run existing dataset package root.","code":""},{"path":"/reference/cbp_add_maf.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Export and add mutations data to cBioPortal dataset — cbp_add_maf","text":"","code":"cbp_add_maf(maf_data, verbose = TRUE)"},{"path":"/reference/cbp_add_maf.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Export and add mutations data to cBioPortal dataset — cbp_add_maf","text":"maf_data Synapse id merged maf file public release. verbose Whether chatty.","code":""},{"path":"/reference/cbp_add_maf.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Export and add mutations data to cBioPortal dataset — cbp_add_maf","text":"Get merged maf file represents filtered subset mafs containing (non-germline) data OK release publicly. 
needs packaged files like example public mutations dataset.","code":""},{"path":"/reference/cbp_datatypes.html","id":null,"dir":"Reference","previous_headings":"","what":"Enumerate combinations of valid cBP data types and data subtypes and helper utils if available — cbp_datatypes","title":"Enumerate combinations of valid cBP data types and data subtypes and helper utils if available — cbp_datatypes","text":"https://docs.cbioportal.org/file-formats/","code":""},{"path":"/reference/cbp_datatypes.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Enumerate combinations of valid cBP data types and data subtypes and helper utils if available — cbp_datatypes","text":"","code":"cbp_datatypes()"},{"path":"/reference/cbp_new_cancer_type.html","id":null,"dir":"Reference","previous_headings":"","what":"Create reference file for new cancer type — cbp_new_cancer_type","title":"Create reference file for new cancer type — cbp_new_cancer_type","text":"Helper creating reference new cancer subtype already exist. https://docs.cbioportal.org/file-formats/#cancer-type","code":""},{"path":"/reference/cbp_new_cancer_type.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Create reference file for new cancer type — cbp_new_cancer_type","text":"","code":"cbp_new_cancer_type(type_of_cancer, name, color, parent_type_of_cancer)"},{"path":"/reference/cbp_new_cancer_type.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Create reference file for new cancer type — cbp_new_cancer_type","text":"type_of_cancer Id new cancer type, e.g. \"cnf\". name Full name new cancer type, e.g. \"Cutaneous Neurofibroma\" color Color name new cancer; https://en.wikipedia.org/wiki/Web_colors#X11_color_names. parent_type_of_cancer Id existing parent, e.g. 
\"nfib\" Neurofibroma.","code":""},{"path":"/reference/cbp_new_study.html","id":null,"dir":"Reference","previous_headings":"","what":"Initialize a new cBioPortal study dataset — cbp_new_study","title":"Initialize a new cBioPortal study dataset — cbp_new_study","text":"Create new directory basic required study meta file, much like create new R package put DESCRIPTION file .","code":""},{"path":"/reference/cbp_new_study.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Initialize a new cBioPortal study dataset — cbp_new_study","text":"","code":"cbp_new_study( cancer_study_identifier, name, type_of_cancer, description = \"The data are contributed by researchers funded by the Neurofibromatosis Therapeutic Acceleration Program (NTAP). The reprocessing of the raw data is managed by the NF Open Science Initiative (https://nf.synapse.org/).\", short_name = NULL, citation = NULL, pmid = NULL, groups = \"PUBLIC\", add_global_case_list = TRUE, validate = TRUE, verbose = TRUE )"},{"path":"/reference/cbp_new_study.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Initialize a new cBioPortal study dataset — cbp_new_study","text":"cancer_study_identifier Cancer study identifier format nst_nfosi_ntap_2022. name Name study, e.g. \"Malignant Peripheral Nerve Sheath Tumor (NF-OSI, 2022)\". type_of_cancer Id type cancer. validate TRUE, one things validated warning mismatched. description Description study, defaults generic description can edited later. short_name (Optional) Short name study. citation (Optional) relevant citation, e.g. \"TCGA, Nature 2012\". pmid (Optional) One relevant pubmed ids (comma-separated, whitespace); used, citation NULL. groups (Optional) Defaults \"PUBLIC\" use public cBioPortal; otherwise, use group names makes sense configuration cBioPortal instance. add_global_case_list (Optional) Use NULL ignore, default TRUE \"samples\" case list( generated automatically. 
validate Validate public cBioPortal configuration. Default TRUE, might want set FALSE especially using custom cBioPortal instance different configuration. verbose Whether chatty.","code":""},{"path":"/reference/check_access.html","id":null,"dir":"Reference","previous_headings":"","what":"Check access — check_access","title":"Check access — check_access","text":"Check access","code":""},{"path":"/reference/check_access.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Check access — check_access","text":"","code":"check_access( id, principal_id, access_type = c(\"CREATE\", \"UPDATE\", \"CHANGE_SETTINGS\", \"DOWNLOAD\", \"MODERATE\", \"READ\", \"CHANGE_PERMISSIONS\", \"DELETE\") )"},{"path":"/reference/check_access.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Check access — check_access","text":"id benefactor entity. principal_id Group(s) check access type. access_type access type(s) check ; result summarizes whether permissions types specified.","code":""},{"path":"/reference/check_byte_budget_col_swap.html","id":null,"dir":"Reference","previous_headings":"","what":"Check byte budget when swapping cols in schema — check_byte_budget_col_swap","title":"Check byte budget when swapping cols in schema — check_byte_budget_col_swap","text":"Check byte budget swapping cols schema","code":""},{"path":"/reference/check_byte_budget_col_swap.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Check byte budget when swapping cols in schema — check_byte_budget_col_swap","text":"","code":"check_byte_budget_col_swap(schema, ref_col, new_col)"},{"path":"/reference/check_cbp_study_id.html","id":null,"dir":"Reference","previous_headings":"","what":"Check that in valid cBioPortal study dataset root — check_cbp_study_id","title":"Check that in valid cBioPortal study dataset root — check_cbp_study_id","text":"cbp_add* functions need run study package root. 
checks valid study directory returns cancer_study_id.","code":""},{"path":"/reference/check_cbp_study_id.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Check that in valid cBioPortal study dataset root — check_cbp_study_id","text":"","code":"check_cbp_study_id()"},{"path":"/reference/check_cbp_study_id.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Check that in valid cBioPortal study dataset root — check_cbp_study_id","text":"cancer_study_id current cBioPortal cancer study.","code":""},{"path":"/reference/check_readpair_validity.html","id":null,"dir":"Reference","previous_headings":"","what":"Check fastq read pair matches samplesheet read pair assignment. — check_readpair_validity","title":"Check fastq read pair matches samplesheet read pair assignment. — check_readpair_validity","text":"Read pairs often encoded name file. , check encoded name file, samplesheet read pair (e.g. _1 _2) matches","code":""},{"path":"/reference/check_readpair_validity.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Check fastq read pair matches samplesheet read pair assignment. — check_readpair_validity","text":"","code":"check_readpair_validity( samplesheet, parse_fun = function(x) gsub(\"_T[0-9]$\", \"\", x) )"},{"path":"/reference/check_readpair_validity.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Check fastq read pair matches samplesheet read pair assignment. — check_readpair_validity","text":"samplesheet local file syn id samplesheet. parse_fun Function implementing parse samples samplesheet.","code":""},{"path":"/reference/check_readpair_validity.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Check fastq read pair matches samplesheet read pair assignment. 
— check_readpair_validity","text":"","code":"if (FALSE) { # \\dontrun{ check_readpair_validity('syn39542932') check_readpair_validity('syn29530880') } # }"},{"path":"/reference/check_wiki_links.html","id":null,"dir":"Reference","previous_headings":"","what":"Check wiki links — check_wiki_links","title":"Check wiki links — check_wiki_links","text":"primarily supports wiki quality control. method wraps helpers retrieve wiki content given project(s), extract URL(s) content, return list link check results per project wiki. Note main wiki page checked. well, remove/replace problematic link(s), still may false positive/negatives may need reviewed manually.","code":""},{"path":"/reference/check_wiki_links.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Check wiki links — check_wiki_links","text":"","code":"check_wiki_links(project_id, to_table = TRUE)"},{"path":"/reference/check_wiki_links.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Check wiki links — check_wiki_links","text":"project_id Character vector synapse project id(s) get wiki. to_table TRUE return results table else keep list. Additional downstream operations may prefer one .","code":""},{"path":"/reference/check_wiki_links.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Check wiki links — check_wiki_links","text":"Depending to_table, list tibble projects links check results links. 
list include projects without links (empty list), table omit projects without links.","code":""},{"path":"/reference/check_wiki_links.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Check wiki links — check_wiki_links","text":"","code":"if (FALSE) { # \\dontrun{ check_wiki_links(project_id = c(\"syn11374354\",\"syn2343195\")) } # }"},{"path":"/reference/checked_message.html","id":null,"dir":"Reference","previous_headings":"","what":"Format checked message notification — checked_message","title":"Format checked message notification — checked_message","text":"nicely displaying step successfully completed.","code":""},{"path":"/reference/checked_message.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Format checked message notification — checked_message","text":"","code":"checked_message(string)"},{"path":"/reference/checked_message.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Format checked message notification — checked_message","text":"string Character string check prepended.","code":""},{"path":"/reference/cite_dataset.html","id":null,"dir":"Reference","previous_headings":"","what":"Generate example dataset citation — cite_dataset","title":"Generate example dataset citation — cite_dataset","text":"currently demo purposes, check well current metadata formatted citation text. Datasets DOIs minted NF-OSI processed datasets within official Portal Collection work well, guarantees cases. 
Note: Internal/experimental use , production use.","code":""},{"path":"/reference/cite_dataset.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Generate example dataset citation — cite_dataset","text":"","code":"cite_dataset(id, format = \"Scientific Data\", output = c(\"markdown\"))"},{"path":"/reference/cite_dataset.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Generate example dataset citation — cite_dataset","text":"id Dataset id. format Currently just \"Scientific Data\" format. output Currently markdown, utils can used generate LaTeX HTML.","code":""},{"path":"/reference/convert_to_stringlist.html","id":null,"dir":"Reference","previous_headings":"","what":"Convert a delimited string to a stringlist annotation — convert_to_stringlist","title":"Convert a delimited string to a stringlist annotation — convert_to_stringlist","text":"schema change operation updates 1) column type list 2) sets new max string length parameter Synapse Table (usually shrinking max value). can optionally consult metadata model good max string length. (might handle max list length future encoded model well). model consulted, built-check error thrown data model recognize key changed, .e. one wants strict key Table documented model. 
model involve (schema = NULL), max string length simply set based current values processing delimited list (original code).","code":""},{"path":"/reference/convert_to_stringlist.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Convert a delimited string to a stringlist annotation — convert_to_stringlist","text":"","code":"convert_to_stringlist( fileview_id, annotation_key, sep = \",\", trim_ws = TRUE, schema = NULL, dry_run = TRUE )"},{"path":"/reference/convert_to_stringlist.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Convert a delimited string to a stringlist annotation — convert_to_stringlist","text":"fileview_id synapse id fileview. Must desired annotations schema, must files annotate included scope. Must write access files want re-annotate. annotation_key character string annotation switch delimited string stringlist. sep delimiter character string. Default = \",\". trim_ws Remove white space beginning end list items (e.g. \"NF1, NF2\" \"NF1,NF2\" yield STRING_LIST result). Default = TRUE. schema Optional, path readable .jsonld schema use setting new col schema. See details. dry_run Skip upload table instead prints study tibble. Default = TRUE.","code":""},{"path":"/reference/convert_to_stringlist.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Convert a delimited string to a stringlist annotation — convert_to_stringlist","text":"dry_run == T, returns list updates skips upload.","code":""},{"path":"/reference/copy.html","id":null,"dir":"Reference","previous_headings":"","what":"Create copy of entity — copy","title":"Create copy of entity — copy","text":"Create copy syn entity; mostly used create copy test changes. 
See https://python-docs.synapse.org/build/html/synapseutils.html?highlight=copy#synapseutils.copy_functions.copy","code":""},{"path":"/reference/copy.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Create copy of entity — copy","text":"","code":"copy( entity, destination_id, skip_copy_wiki_page = FALSE, skip_copy_annotations = FALSE )"},{"path":"/reference/copy.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Create copy of entity — copy","text":"entity Entity copy. destination_id Id destination project/container entity copied . skip_copy_wiki_page Whether skip copying wiki; defaults FALSE. skip_copy_annotations Whether skip copying annotations; defaults FALSE.","code":""},{"path":"/reference/copy_annotations.html","id":null,"dir":"Reference","previous_headings":"","what":"Copy annotations — copy_annotations","title":"Copy annotations — copy_annotations","text":"Copy annotations (selectively) source entity one target entities. annotation keys already exist target entities, copy replace current values.","code":""},{"path":"/reference/copy_annotations.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Copy annotations — copy_annotations","text":"","code":"copy_annotations( entity_from, entity_to, select = NULL, update = FALSE, as_list = TRUE )"},{"path":"/reference/copy_annotations.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Copy annotations — copy_annotations","text":"entity_from Syn id copy. entity_to One syn ids copy annotations . select Vector properties selectively copy present entity. specified, copy everything, may desirable. update Whether immediately update return annotation objects . 
as_list used update=FALSE; backwards-compatibility downstream usage copy_annotations expects R list, return R list.","code":""},{"path":"/reference/data_curator_app_subpage.html","id":null,"dir":"Reference","previous_headings":"","what":"Create NF Data Curator App subpage — data_curator_app_subpage","title":"Create NF Data Curator App subpage — data_curator_app_subpage","text":"Convenience method create subpage default buttons annotation app docs. highly specific method expected limited lifespan.","code":""},{"path":"/reference/data_curator_app_subpage.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Create NF Data Curator App subpage — data_curator_app_subpage","text":"","code":"data_curator_app_subpage(project_id, dry_run = TRUE)"},{"path":"/reference/data_curator_app_subpage.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Create NF Data Curator App subpage — data_curator_app_subpage","text":"project_id ID owner Synapse project. 
dry_run Whether return wiki object without actually performing update.","code":""},{"path":"/reference/delete_provenance.html","id":null,"dir":"Reference","previous_headings":"","what":"Remove provenance info — delete_provenance","title":"Remove provenance info — delete_provenance","text":"Remove provenance info","code":""},{"path":"/reference/delete_provenance.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Remove provenance info — delete_provenance","text":"","code":"delete_provenance(entities)"},{"path":"/reference/delete_provenance.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Remove provenance info — delete_provenance","text":"entities Vector list entities.","code":""},{"path":"/reference/derive_annotations.html","id":null,"dir":"Reference","previous_headings":"","what":"Derive annotations for processed output data — derive_annotations","title":"Derive annotations for processed output data — derive_annotations","text":"processed derived file can inherit annotations input file(s). Currently, generously facilitates inheritance many properties except ones \"obviously\" inherited, \"fileFormat\" \"comments\". rules hard-coded might need expanded data model changes.","code":""},{"path":"/reference/derive_annotations.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Derive annotations for processed output data — derive_annotations","text":"","code":"derive_annotations( sample_io, template = NULL, schema = \"https://raw.githubusercontent.com/nf-osi/nf-metadata-dictionary/main/NF.jsonld\", verbose = TRUE )"},{"path":"/reference/derive_annotations.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Derive annotations for processed output data — derive_annotations","text":"sample_io Mapping input output files workflow. template template use deriving annotations. controls attributes relevant transfer/keep. 
given, use whatever set attribute \"template\". schema Reference data model source. verbose Whether output detailed messages.","code":""},{"path":"/reference/derive_annotations.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Derive annotations for processed output data — derive_annotations","text":"multiple inputs given, inherit annotations FIRST input.","code":""},{"path":"/reference/dot-add_publication_from_pubmed.html","id":null,"dir":"Reference","previous_headings":"","what":"Higher-level fun to generate add_publication_from_pubmed util for one-off usage (default) or optimized for batch processing. — .add_publication_from_pubmed","title":"Higher-level fun to generate add_publication_from_pubmed util for one-off usage (default) or optimized for batch processing. — .add_publication_from_pubmed","text":"Higher-level fun generate add_publication_from_pubmed util one-usage (default) optimized batch processing.","code":""},{"path":"/reference/dot-add_publication_from_pubmed.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Higher-level fun to generate add_publication_from_pubmed util for one-off usage (default) or optimized for batch processing. — .add_publication_from_pubmed","text":"","code":".add_publication_from_pubmed(batch = 0L, cache = batch)"},{"path":"/reference/dot-add_publication_from_pubmed.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Higher-level fun to generate add_publication_from_pubmed util for one-off usage (default) or optimized for batch processing. — .add_publication_from_pubmed","text":"batch non-zero batch size, turns batch mode; defaults -batch. cache Whether cache results, default batch.","code":""},{"path":"/reference/dot-check_login.html","id":null,"dir":"Reference","previous_headings":"","what":"Checks .syn object exists. — .check_login","title":"Checks .syn object exists. 
— .check_login","text":"Checks .syn object exists.","code":""},{"path":"/reference/dot-check_login.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Checks .syn object exists. — .check_login","text":"","code":".check_login()"},{"path":"/reference/dot-check_login.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Checks .syn object exists. — .check_login","text":"message.","code":""},{"path":"/reference/dot-delim_string_to_vector.html","id":null,"dir":"Reference","previous_headings":"","what":"Convert a delimited string to vector, utility function. — .delim_string_to_vector","title":"Convert a delimited string to vector, utility function. — .delim_string_to_vector","text":"Converts delimited string stringlist annotation adjust associated schema portal fileview.","code":""},{"path":"/reference/dot-delim_string_to_vector.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Convert a delimited string to vector, utility function. — .delim_string_to_vector","text":"","code":".delim_string_to_vector(string, sep, trim_ws = T)"},{"path":"/reference/dot-delim_string_to_vector.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Convert a delimited string to vector, utility function. — .delim_string_to_vector","text":"string character string. sep Default = \",\". delimiter character string. trim_ws Default = TRUE. Remove white space beginning end list items (e.g. 
\"NF1, NF2\" \"NF1,NF2\" yield STRING_LIST result).","code":""},{"path":"/reference/dot-dict_to_list.html","id":null,"dir":"Reference","previous_headings":"","what":"Convert a flat Python Dict to R list — .dict_to_list","title":"Convert a flat Python Dict to R list — .dict_to_list","text":"internal function used convert Annotations objects returned get_annotations.","code":""},{"path":"/reference/dot-dict_to_list.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Convert a flat Python Dict to R list — .dict_to_list","text":"","code":".dict_to_list(dict)"},{"path":"/reference/dot-dict_to_list.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Convert a flat Python Dict to R list — .dict_to_list","text":"dict flat Python Dict object.","code":""},{"path":"/reference/dot-modify_annotation.html","id":null,"dir":"Reference","previous_headings":"","what":"Modify a single annotation on a single file — .modify_annotation","title":"Modify a single annotation on a single file — .modify_annotation","text":"Modifies single annotation value single (existing) synapse file.","code":""},{"path":"/reference/dot-modify_annotation.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Modify a single annotation on a single file — .modify_annotation","text":"","code":".modify_annotation(synapse_id, key, value)"},{"path":"/reference/dot-modify_annotation.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Modify a single annotation on a single file — .modify_annotation","text":"synapse_id synapse entity id. key key annotation modify. 
value value change annotation .","code":""},{"path":"/reference/dot-replace_string_column_with_stringlist_column.html","id":null,"dir":"Reference","previous_headings":"","what":"Replace string column with stringlist column — .replace_string_column_with_stringlist_column","title":"Replace string column with stringlist column — .replace_string_column_with_stringlist_column","text":"Guts ripped @jaeddy gist (https://gist.github.com/jaeddy/1cf49f7851945beedb39d431134734af)","code":""},{"path":"/reference/dot-replace_string_column_with_stringlist_column.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Replace string column with stringlist column — .replace_string_column_with_stringlist_column","text":"","code":".replace_string_column_with_stringlist_column( table_id, column_name, max_str_len )"},{"path":"/reference/dot-replace_string_column_with_stringlist_column.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Replace string column with stringlist column — .replace_string_column_with_stringlist_column","text":"table_id synapse entity id. column_name column name relevant column modify. max_str_len Max string length set schema new column.","code":""},{"path":"/reference/dot-store_rows.html","id":null,"dir":"Reference","previous_headings":"","what":"Adds a row to a table. — .store_rows","title":"Adds a row to a table. — .store_rows","text":"Adds row table.","code":""},{"path":"/reference/dot-store_rows.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Adds a row to a table. — .store_rows","text":"","code":".store_rows(schema, new_row)"},{"path":"/reference/dot-store_rows.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Adds a row to a table. — .store_rows","text":"schema synapse table Schema object. 
new_row data frame one rows match provided schema.","code":""},{"path":"/reference/dot-update_table_data.html","id":null,"dir":"Reference","previous_headings":"","what":"Replace/update table contents = input data must have ROW_ID and ROW_VERSION columns to update, otherwise will append data. — .update_table_data","title":"Replace/update table contents = input data must have ROW_ID and ROW_VERSION columns to update, otherwise will append data. — .update_table_data","text":"Replace/update table contents = input data must ROW_ID ROW_VERSION columns update, otherwise append data.","code":""},{"path":"/reference/dot-update_table_data.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Replace/update table contents = input data must have ROW_ID and ROW_VERSION columns to update, otherwise will append data. — .update_table_data","text":"","code":".update_table_data(table_id, new_data, etag = NULL)"},{"path":"/reference/dot-update_table_data.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Replace/update table contents = input data must have ROW_ID and ROW_VERSION columns to update, otherwise will append data. — .update_table_data","text":"table_id synapse id table update. new_data updated table. etag etag latest version table. provided, query table_id retrieve latest etag.","code":""},{"path":"/reference/dot-update_view_data.html","id":null,"dir":"Reference","previous_headings":"","what":"Replace/update table contents = input data must have ROW_ID, ROW_VERSION, ETAG columns to update. — .update_view_data","title":"Replace/update table contents = input data must have ROW_ID, ROW_VERSION, ETAG columns to update. 
— .update_view_data","text":"Replace/update table contents = input data must ROW_ID, ROW_VERSION, ETAG columns update.","code":""},{"path":"/reference/dot-update_view_data.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Replace/update table contents = input data must have ROW_ID, ROW_VERSION, ETAG columns to update. — .update_view_data","text":"","code":".update_view_data(table_id, new_data)"},{"path":"/reference/dot-update_view_data.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Replace/update table contents = input data must have ROW_ID, ROW_VERSION, ETAG columns to update. — .update_view_data","text":"table_id synapse id table update. new_data updated table.","code":""},{"path":"/reference/dsp_dataset_mapping.html","id":null,"dir":"Reference","previous_headings":"","what":"Wrapper to create Data Sharing Plan to project dataset comparison chart — dsp_dataset_mapping","title":"Wrapper to create Data Sharing Plan to project dataset comparison chart — dsp_dataset_mapping","text":"Wrapper create Data Sharing Plan project dataset comparison chart","code":""},{"path":"/reference/dsp_dataset_mapping.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Wrapper to create Data Sharing Plan to project dataset comparison chart — dsp_dataset_mapping","text":"","code":"dsp_dataset_mapping(dsp_datasets, project_datasets)"},{"path":"/reference/dsp_dataset_mapping.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Wrapper to create Data Sharing Plan to project dataset comparison chart — dsp_dataset_mapping","text":"dsp_datasets Named vector datasets data sharing plan. 
project_datasets Named vector datasets project.","code":""},{"path":"/reference/dt_read.html","id":null,"dir":"Reference","previous_headings":"","what":"Download and read file to data.table — dt_read","title":"Download and read file to data.table — dt_read","text":"Convenience function reading delimited local file one Synapse.","code":""},{"path":"/reference/dt_read.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Download and read file to data.table — dt_read","text":"","code":"dt_read(file)"},{"path":"/reference/dt_read.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Download and read file to data.table — dt_read","text":"file File Synapse id local path.","code":""},{"path":"/reference/extract_syn_id_from_ss.html","id":null,"dir":"Reference","previous_headings":"","what":"Extract Synapse id from URI — extract_syn_id_from_ss","title":"Extract Synapse id from URI — extract_syn_id_from_ss","text":"Given vector x URIs/file paths, try different methods extract likely needed Synapse id, sanity checks results.","code":""},{"path":"/reference/extract_syn_id_from_ss.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Extract Synapse id from URI — extract_syn_id_from_ss","text":"","code":"extract_syn_id_from_ss(x)"},{"path":[]},{"path":"/reference/find_child.html","id":null,"dir":"Reference","previous_headings":"","what":"Find id of a child entity in a container — find_child","title":"Find id of a child entity in a container — find_child","text":"Find id child entity container","code":""},{"path":"/reference/find_child.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Find id of a child entity in a container — find_child","text":"","code":"find_child(child_name, parent)"},{"path":"/reference/find_child.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Find id of a child entity 
in a container — find_child","text":"child_name Name child entity. parent Parent container (project folder).","code":""},{"path":"/reference/find_child_type.html","id":null,"dir":"Reference","previous_headings":"","what":"Find children of type — find_child_type","title":"Find children of type — find_child_type","text":"Small utility like find_child retrieves files type rather specific name. Returns vector ids, entity names set names.","code":""},{"path":"/reference/find_child_type.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Find children of type — find_child_type","text":"","code":"find_child_type(parent, child_type = list(\"file\"))"},{"path":"/reference/find_child_type.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Find children of type — find_child_type","text":"parent Parent container (project folder). child_type Type(s) list, even one type. Defaults \"file\".","code":""},{"path":"/reference/find_data_root.html","id":null,"dir":"Reference","previous_headings":"","what":"Find data folder — find_data_root","title":"Find data folder — find_data_root","text":"Convenience function find data folder, can slight name variations, project.","code":""},{"path":"/reference/find_data_root.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Find data folder — find_data_root","text":"","code":"find_data_root(project_id)"},{"path":"/reference/find_data_root.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Find data folder — find_data_root","text":"project_id Synapse project id.","code":""},{"path":"/reference/find_in.html","id":null,"dir":"Reference","previous_headings":"","what":"Find in path — find_in","title":"Find in path — find_in","text":"Get Synapse id entity nested several folder layers deep without click UI create fileview long structure/path 
known.","code":""},{"path":"/reference/find_in.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Find in path — find_in","text":"","code":"find_in(scope, path)"},{"path":"/reference/find_in.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Find in path — find_in","text":"scope Id container (project folder) begin search. path Path string format \"subdir1/subdir2/file.txt\", last-level element id returned.","code":""},{"path":"/reference/find_nf_asset.html","id":null,"dir":"Reference","previous_headings":"","what":"Find a standard nextflow workflow output asset — find_nf_asset","title":"Find a standard nextflow workflow output asset — find_nf_asset","text":"Note samplesheets became part output newer versions nf-core/rna-seq; older runs may find samplesheets. Paths default known working paths corresponding latest major workflow version, may change may need updated part util maintenance.","code":""},{"path":"/reference/find_nf_asset.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Find a standard nextflow workflow output asset — find_nf_asset","text":"","code":"find_nf_asset( syn_out, asset = c(\"software_versions\", \"multiqc_report\", \"samplesheet\", \"samtools_stats\"), workflow = \"nf-rnaseq\" )"},{"path":"/reference/find_nf_asset.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Find a standard nextflow workflow output asset — find_nf_asset","text":"syn_out Id top-level folder corresponds publishDir nextflow workflow. asset Name asset find. 
workflow Specify workflow, \"nf-rnaseq\" \"nf-sarek\"; defaults \"nf-rnaseq\".","code":""},{"path":"/reference/find_nf_asset.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Find a standard nextflow workflow output asset — find_nf_asset","text":"Id samplesheet.","code":""},{"path":"/reference/find_parent.html","id":null,"dir":"Reference","previous_headings":"","what":"Find parent — find_parent","title":"Find parent — find_parent","text":"Find parent","code":""},{"path":"/reference/find_parent.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Find parent — find_parent","text":"","code":"find_parent(id)"},{"path":"/reference/find_parent.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Find parent — find_parent","text":"id Synapse id","code":""},{"path":"/reference/format_gene_expression_data.html","id":null,"dir":"Reference","previous_headings":"","what":"Format gene expression — format_gene_expression_data","title":"Format gene expression — format_gene_expression_data","text":"Format gene expression","code":""},{"path":"/reference/format_gene_expression_data.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Format gene expression — format_gene_expression_data","text":"","code":"format_gene_expression_data(file)"},{"path":"/reference/from_pubmed.html","id":null,"dir":"Reference","previous_headings":"","what":"Get publication metadata from PubMed — from_pubmed","title":"Get publication metadata from PubMed — from_pubmed","text":"Get publication metadata PubMed","code":""},{"path":"/reference/from_pubmed.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Get publication metadata from PubMed — from_pubmed","text":"","code":"from_pubmed(pmid)"},{"path":"/reference/from_pubmed.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Get publication 
metadata from PubMed — from_pubmed","text":"pmid PubMed id.","code":""},{"path":"/reference/from_pubmed.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Get publication metadata from PubMed — from_pubmed","text":"PMID found, return meta table w/ title journal author year pmid doi.","code":""},{"path":"/reference/gather_annotations.html","id":null,"dir":"Reference","previous_headings":"","what":"Internal helper for gathering annotations into a table using the REST API — gather_annotations","title":"Internal helper for gathering annotations into a table using the REST API — gather_annotations","text":"internal implementation works directly platform service JSON afford low-level control avoid Python-R object conversion differences reticulate versions.","code":""},{"path":"/reference/gather_annotations.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Internal helper for gathering annotations into a table using the REST API — gather_annotations","text":"","code":"gather_annotations(ids, list_sep = \", \")"},{"path":"/reference/gather_annotations.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Internal helper for gathering annotations into a table using the REST API — gather_annotations","text":"ids One ids. list_sep List separator list annotations.","code":""},{"path":"/reference/get_by_prop_from_json_schema.html","id":null,"dir":"Reference","previous_headings":"","what":"Look up connected nodes by specified property in JSON-LD schema — get_by_prop_from_json_schema","title":"Look up connected nodes by specified property in JSON-LD schema — get_by_prop_from_json_schema","text":"Use schematic-generated JSON-LD schema: given @id, get connected nodes specified prop (e.g. sms:something). Intended generic used define specific lookup utils. Can recursive lookup, though graph tree/acyclic (!). 
(Useful props dependsOn, make sense props rdfs:label.)","code":""},{"path":"/reference/get_by_prop_from_json_schema.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Look up connected nodes by specified property in JSON-LD schema — get_by_prop_from_json_schema","text":"","code":"get_by_prop_from_json_schema( id, prop, schema = \"https://raw.githubusercontent.com/nf-osi/nf-metadata-dictionary/main/NF.jsonld\", return_labels = TRUE, recursive = FALSE, result = NULL, rest = NULL )"},{"path":"/reference/get_by_prop_from_json_schema.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Look up connected nodes by specified property in JSON-LD schema — get_by_prop_from_json_schema","text":"id Id (@id) get range values; include prefix needed. prop Property; include prefix needed. schema Path (URL local) file schema read, schema list object. return_labels Return labels (default), otherwise ids connected nodes. recursive Recursive lookup? result Vector accumulated results; used recursive lookup. rest Vector remaining ids; used recursive lookup.","code":""},{"path":"/reference/get_cbio_filename.html","id":null,"dir":"Reference","previous_headings":"","what":"Get cBioPortal clinical file name based on clinical data type — get_cbio_filename","title":"Get cBioPortal clinical file name based on clinical data type — get_cbio_filename","text":"called wrapper write_cbio_clinical. Adapted https://github.com/Sage-Bionetworks/genie-erbb2-cbio/blob/develop/create_clinical.R#L411. 
Note clinical file types, PATIENT type can actually optional, (NF) currently use TIMELINE type, options simplified.","code":""},{"path":"/reference/get_cbio_filename.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Get cBioPortal clinical file name based on clinical data type — get_cbio_filename","text":"","code":"get_cbio_filename(clinical_type = c(\"SAMPLE\", \"PATIENT\"))"},{"path":"/reference/get_cbio_filename.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Get cBioPortal clinical file name based on clinical data type — get_cbio_filename","text":"clinical_type String representing cBioPortal clinical data type.","code":""},{"path":"/reference/get_cbio_filename.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Get cBioPortal clinical file name based on clinical data type — get_cbio_filename","text":"string","code":""},{"path":"/reference/get_dependency_from_json_schema.html","id":null,"dir":"Reference","previous_headings":"","what":"Get dependencies for node in JSON-LD schema — get_dependency_from_json_schema","title":"Get dependencies for node in JSON-LD schema — get_dependency_from_json_schema","text":"Shorthand getting props defined annotation template using get_by_prop_from_json_schema hood.","code":""},{"path":"/reference/get_dependency_from_json_schema.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Get dependencies for node in JSON-LD schema — get_dependency_from_json_schema","text":"","code":"get_dependency_from_json_schema( id, prop = \"sms:requiresDependency\", schema = \"https://raw.githubusercontent.com/nf-osi/nf-metadata-dictionary/main/NF.jsonld\", return_labels = TRUE, recursive = TRUE, result = NULL, rest = NULL )"},{"path":"/reference/get_dependency_from_json_schema.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Get dependencies for node in JSON-LD 
schema — get_dependency_from_json_schema","text":"id Id (@id) get range values; include prefix needed. prop Property; include prefix needed. schema Path (URL local) file schema read, schema list object. return_labels Return labels (default), otherwise ids connected nodes. recursive Recursive lookup? result Vector accumulated results; used recursive lookup. rest Vector remaining ids; used recursive lookup.","code":""},{"path":"/reference/get_doi_meta.html","id":null,"dir":"Reference","previous_headings":"","what":"Get DOI metadata if it exists — get_doi_meta","title":"Get DOI metadata if it exists — get_doi_meta","text":"Returns list metadata associated DOI exists, otherwise NULL. Currently usable certain entity types like files datasets, though revised make useful objects. Note: Internal/experimental use , production use.","code":""},{"path":"/reference/get_doi_meta.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Get DOI metadata if it exists — get_doi_meta","text":"","code":"get_doi_meta(id)"},{"path":"/reference/get_doi_meta.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Get DOI metadata if it exists — get_doi_meta","text":"id Dataset data.","code":""},{"path":"/reference/get_path.html","id":null,"dir":"Reference","previous_headings":"","what":"Get path for a Synapse id — get_path","title":"Get path for a Synapse id — get_path","text":"Small helper fun get path given Synapse entity.","code":""},{"path":"/reference/get_path.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Get path for a Synapse id — get_path","text":"","code":"get_path(id, path = NULL)"},{"path":"/reference/get_path.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Get path for a Synapse id — get_path","text":"id Synapse id. 
path Path builder value.","code":""},{"path":"/reference/get_project_wiki.html","id":null,"dir":"Reference","previous_headings":"","what":"Get wiki content of synapse project(s) — get_project_wiki","title":"Get wiki content of synapse project(s) — get_project_wiki","text":"Get wiki object text content (main page ). primarily helper function used QC may useful wiki analysis.","code":""},{"path":"/reference/get_project_wiki.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Get wiki content of synapse project(s) — get_project_wiki","text":"","code":"get_project_wiki(project_id, markdown = TRUE)"},{"path":"/reference/get_project_wiki.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Get wiki content of synapse project(s) — get_project_wiki","text":"project_id Character vector synapse project id(s) get wiki. markdown TRUE (default) return markdown text, else return full wiki object.","code":""},{"path":"/reference/get_project_wiki.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Get wiki content of synapse project(s) — get_project_wiki","text":"list storing wiki object markdown-formatted text.","code":""},{"path":"/reference/get_project_wiki.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Get wiki content of synapse project(s) — get_project_wiki","text":"","code":"if (FALSE) { # \\dontrun{ txt <- get_project_wiki(c(\"syn11374354\",\"syn2343195\")) } # }"},{"path":"/reference/get_valid_values_from_json_schema.html","id":null,"dir":"Reference","previous_headings":"","what":"Retrieve valid subclasses of a value in a JSON-LD schema — get_valid_values_from_json_schema","title":"Retrieve valid subclasses of a value in a JSON-LD schema — get_valid_values_from_json_schema","text":"Retrieve valid subclasses value JSON-LD schema generated 
schematic.","code":""},{"path":"/reference/get_valid_values_from_json_schema.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Retrieve valid subclasses of a value in a JSON-LD schema — get_valid_values_from_json_schema","text":"","code":"get_valid_values_from_json_schema( schema_url = \"https://raw.githubusercontent.com/nf-osi/nf-metadata-dictionary/main/NF.jsonld\", parent_name = \"DataType\", parent_context = \"bts\" )"},{"path":"/reference/get_valid_values_from_json_schema.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Retrieve valid subclasses of a value in a JSON-LD schema — get_valid_values_from_json_schema","text":"schema_url Default: NF-OSI JSON-LD schema. parent_name Default = DataType. value like find associated subclasses. parent_context Default = bts. JSON-LD context value question.","code":""},{"path":"/reference/get_valid_values_from_json_schema.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Retrieve valid subclasses of a value in a JSON-LD schema — get_valid_values_from_json_schema","text":"character vector values.","code":""},{"path":"/reference/grant_specific_file_access.html","id":null,"dir":"Reference","previous_headings":"","what":"Provide access to a specific set of files using a query result. — grant_specific_file_access","title":"Provide access to a specific set of files using a query result. — grant_specific_file_access","text":"Sets READ/DOWNLOAD permissions specific user team, provided vector entity IDs. Generally, set permissions way, can create many, many ACLs/\"local sharing settings\" need removed time data publication. However, time writing, one project (JHU Biobank) shares embargoed data required share specific subsets files needed data requestor (e.g. 
MPNST tumor data, RNA-seq data).","code":""},{"path":"/reference/grant_specific_file_access.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Provide access to a specific set of files using a query result. — grant_specific_file_access","text":"","code":"grant_specific_file_access( principal_id, entity_ids, create_dataset = F, project_id = NULL, dataset_name = NULL )"},{"path":"/reference/grant_specific_file_access.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Provide access to a specific set of files using a query result. — grant_specific_file_access","text":"principal_id Synapse team user id. entity_ids Vector entity ids. create_dataset Optionally, create dataset entity_ids, user can easily retrieve . project_id create_dataset=T, project create . dataset_name Optional name dataset created","code":""},{"path":"/reference/identify_read_pair.html","id":null,"dir":"Reference","previous_headings":"","what":"Identify read pair from string — identify_read_pair","title":"Identify read pair from string — identify_read_pair","text":"Identify read pair string","code":""},{"path":"/reference/identify_read_pair.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Identify read pair from string — identify_read_pair","text":"","code":"identify_read_pair(string)"},{"path":"/reference/identify_read_pair.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Identify read pair from string — identify_read_pair","text":"string filename string.","code":""},{"path":"/reference/identify_read_pair.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Identify read pair from string — identify_read_pair","text":"Returns read pair: 1, 2, NULL none detected.","code":""},{"path":"/reference/infer_data_type.html","id":null,"dir":"Reference","previous_headings":"","what":"Infer data type of a dataset folder — 
infer_data_type","title":"Infer data type of a dataset folder — infer_data_type","text":"Infer data type checking first files. TODO: Check dataType instead Component derive Component older files Component explicitly.","code":""},{"path":"/reference/infer_data_type.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Infer data type of a dataset folder — infer_data_type","text":"","code":"infer_data_type(dataset_id)"},{"path":"/reference/infer_data_type.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Infer data type of a dataset folder — infer_data_type","text":"dataset_id Optional, given fills manifest existing dataset instead generating blank manifest.","code":""},{"path":"/reference/infer_data_type.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Infer data type of a dataset folder — infer_data_type","text":"List structure list(result = result, notes = notes), result can NA.","code":""},{"path":"/reference/is_dataset.html","id":null,"dir":"Reference","previous_headings":"","what":"Check whether entity is dataset — is_dataset","title":"Check whether entity is dataset — is_dataset","text":"Check whether entity dataset","code":""},{"path":"/reference/is_dataset.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Check whether entity is dataset — is_dataset","text":"","code":"is_dataset(id)"},{"path":"/reference/is_dataset_collection.html","id":null,"dir":"Reference","previous_headings":"","what":"Check whether entity is dataset collection — is_dataset_collection","title":"Check whether entity is dataset collection — is_dataset_collection","text":"Check whether entity dataset collection","code":""},{"path":"/reference/is_dataset_collection.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Check whether entity is dataset collection — 
is_dataset_collection","text":"","code":"is_dataset_collection(id)"},{"path":"/reference/is_file.html","id":null,"dir":"Reference","previous_headings":"","what":"Check whether entity is file — is_file","title":"Check whether entity is file — is_file","text":"Check whether entity file","code":""},{"path":"/reference/is_file.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Check whether entity is file — is_file","text":"","code":"is_file(id)"},{"path":"/reference/is_valid_syn_id.html","id":null,"dir":"Reference","previous_headings":"","what":"Validate a Synapse ID — is_valid_syn_id","title":"Validate a Synapse ID — is_valid_syn_id","text":"Returns id valid, throws error .","code":""},{"path":"/reference/is_valid_syn_id.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Validate a Synapse ID — is_valid_syn_id","text":"","code":"is_valid_syn_id(id)"},{"path":"/reference/is_valid_syn_id.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Validate a Synapse ID — is_valid_syn_id","text":"id Id string.","code":""},{"path":"/reference/is_valid_team.html","id":null,"dir":"Reference","previous_headings":"","what":"Check that is valid team in Synapse — is_valid_team","title":"Check that is valid team in Synapse — is_valid_team","text":"Check valid team Synapse","code":""},{"path":"/reference/is_valid_team.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Check that is valid team in Synapse — is_valid_team","text":"","code":"is_valid_team(id)"},{"path":"/reference/is_valid_user.html","id":null,"dir":"Reference","previous_headings":"","what":"Check that is valid user in Synapse — is_valid_user","title":"Check that is valid user in Synapse — is_valid_user","text":"Check valid user Synapse","code":""},{"path":"/reference/is_valid_user.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Check 
that is valid user in Synapse — is_valid_user","text":"","code":"is_valid_user(id)"},{"path":"/reference/key_label_to_id.html","id":null,"dir":"Reference","previous_headings":"","what":"Query for schema key id given label — key_label_to_id","title":"Query for schema key id given label — key_label_to_id","text":"Utility translate label id using schematic-generated schema.","code":""},{"path":"/reference/key_label_to_id.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Query for schema key id given label — key_label_to_id","text":"","code":"key_label_to_id( label, prefixed = TRUE, schema = \"https://raw.githubusercontent.com/nf-osi/nf-metadata-dictionary/main/NF.jsonld\" )"},{"path":"/reference/key_label_to_id.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Query for schema key id given label — key_label_to_id","text":"label term label, .k.display name. prefixed Boolean indicate whether include namespace prefix return bare ID. Defaults TRUE. schema URL local path .jsonld file schema read .","code":""},{"path":"/reference/key_label_to_id.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Query for schema key id given label — key_label_to_id","text":"id found, \"bts:MyID\", otherwise empty character vector.","code":""},{"path":"/reference/latest_version.html","id":null,"dir":"Reference","previous_headings":"","what":"Get the latest version — latest_version","title":"Get the latest version — latest_version","text":"Get latest version, special handling semantics \"latest\" regarding new collection types. Datasets dataset collections always start draft unlike entities concept stable version \"real\" latest, might always exist. datasets/dataset collections latest version refers DRAFT, latest stable version versionNumber - 1 condition versionNumber greater equal 2. versionNumber = 1 isLatestVersion TRUE, means yet stable version. 
using stable version semantics, stable version exist error thrown.","code":""},{"path":"/reference/latest_version.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Get the latest version — latest_version","text":"","code":"latest_version(id, version_semantics = c(\"abs\", \"stable\"))"},{"path":"/reference/latest_version.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Get the latest version — latest_version","text":"id Dataset id. See details. version_semantics Use \"abs\" absolute latest version \"stable\". used collection entities. See details.","code":""},{"path":"/reference/latest_version.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Get the latest version — latest_version","text":"parameter version_semantics allows user specify \"type latest mean?\". Note: use versioned ids form \"syn12345678.3\"","code":""},{"path":"/reference/list_project_datasets.html","id":null,"dir":"Reference","previous_headings":"","what":"List datasets in project — list_project_datasets","title":"List datasets in project — list_project_datasets","text":"Return list datasets. Datasets can folders expected location project actual dataset entities. Note dataset-folders always exists first project, sort dataset precursor. files dataset-folders can translated dataset entities later. found, return NULL w/ explanatory message.","code":""},{"path":"/reference/list_project_datasets.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"List datasets in project — list_project_datasets","text":"","code":"list_project_datasets(project_id, type = c(\"folder\", \"dataset\"))"},{"path":"/reference/list_project_datasets.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"List datasets in project — list_project_datasets","text":"project_id Synapse project id. 
type Whether list datasets immediate folders \"Raw Data\" root (default, see details) actual dataset entities project.","code":""},{"path":"/reference/make_admin.html","id":null,"dir":"Reference","previous_headings":"","what":"Make a user or group full admin of a Synapse entity — make_admin","title":"Make a user or group full admin of a Synapse entity — make_admin","text":"Convenience method set admin permissions","code":""},{"path":"/reference/make_admin.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Make a user or group full admin of a Synapse entity — make_admin","text":"","code":"make_admin(entity, principal_id)"},{"path":"/reference/make_admin.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Make a user or group full admin of a Synapse entity — make_admin","text":"entity Synapse entity, e.g. project folder. principal_id User/team name id (e.g. \"NF-OSI Sage Team\", \"3378999\", \"nf-bot\", \"3423450\") configured access entity.","code":""},{"path":"/reference/make_cbio_clinical_header.html","id":null,"dir":"Reference","previous_headings":"","what":"Make header for cBioPortal clinical data file — make_cbio_clinical_header","title":"Make header for cBioPortal clinical data file — make_cbio_clinical_header","text":"called wrapper write_cbio_clinical. Adapted https://github.com/Sage-Bionetworks/genie-erbb2-cbio/blob/develop/create_clinical.R#L396. 
Needs data table clinical data reference providing label, description, data_type.","code":""},{"path":"/reference/make_cbio_clinical_header.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Make header for cBioPortal clinical data file — make_cbio_clinical_header","text":"","code":"make_cbio_clinical_header(df, mapping)"},{"path":"/reference/make_cbio_clinical_header.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Make header for cBioPortal clinical data file — make_cbio_clinical_header","text":"df data.frame representing clinical dataset publicize. mapping reference table providing label, description, data_type source attribute df.","code":""},{"path":"/reference/make_folder.html","id":null,"dir":"Reference","previous_headings":"","what":"Create project folders — make_folder","title":"Create project folders — make_folder","text":"Use set scaffold standard upper-level folders well customized data folders within \"Raw Data\" new project.","code":""},{"path":"/reference/make_folder.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Create project folders — make_folder","text":"","code":"make_folder(parent, folders)"},{"path":"/reference/make_folder.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Create project folders — make_folder","text":"parent Synapse id object parent container, .e. project another folder. 
folders List giving one folder names folder(s) create.","code":""},{"path":"/reference/make_folder.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Create project folders — make_folder","text":"list created folder object(s).","code":""},{"path":"/reference/make_folder.html","id":"ref-examples","dir":"Reference","previous_headings":"","what":"Examples","title":"Create project folders — make_folder","text":"","code":"if (FALSE) { # \\dontrun{ datasets <- list(\"sequencing data\", \"imaging data\") assays <- c(\"rnaSeq\", \"immunohistochemistry\") for(i in seq_along(datasets)) attr(datasets[[i]], \"assay\") <- assays[[i]] make_folder(parent = \"syn26462036\", datasets) } # }"},{"path":"/reference/make_meta_clinical_generic.html","id":null,"dir":"Reference","previous_headings":"","what":"Generic template for clinical data file — make_meta_clinical_generic","title":"Generic template for clinical data file — make_meta_clinical_generic","text":"Make meta file describe one clinical data files (e.g. SAMPLE, PATIENT). Adapted https://github.com/Sage-Bionetworks/genie-erbb2-cbio/blob/develop/make_meta.R#L65","code":""},{"path":"/reference/make_meta_clinical_generic.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Generic template for clinical data file — make_meta_clinical_generic","text":"","code":"make_meta_clinical_generic( cancer_study_identifier, genetic_alteration_type, datatype, data_filename )"},{"path":"/reference/make_meta_clinical_generic.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Generic template for clinical data file — make_meta_clinical_generic","text":"cancer_study_identifier study identifier. genetic_alteration_type cBioPortal generic alteration type. datatype cBioPortal data type data_filename. 
data_filename Name data file meta file describes.","code":""},{"path":"/reference/make_meta_cna.html","id":null,"dir":"Reference","previous_headings":"","what":"Make meta file for cBioPortal copy number alteration data — make_meta_cna","title":"Make meta file for cBioPortal copy number alteration data — make_meta_cna","text":"Currently assumes seg data extended later.","code":""},{"path":"/reference/make_meta_cna.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Make meta file for cBioPortal copy number alteration data — make_meta_cna","text":"","code":"make_meta_cna( cancer_study_identifier, data_filename = \"data_cna.seg\", reference_genome_id = \"hg19\", publish_dir = \".\", write = TRUE, verbose = TRUE )"},{"path":"/reference/make_meta_cna.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Make meta file for cBioPortal copy number alteration data — make_meta_cna","text":"See https://docs.cbioportal.org/file-formats/#segmented-data","code":""},{"path":"/reference/make_meta_expression.html","id":null,"dir":"Reference","previous_headings":"","what":"Make meta file for cBioPortal expression data — make_meta_expression","title":"Make meta file for cBioPortal expression data — make_meta_expression","text":"https://docs.cbioportal.org/file-formats/#expression-data","code":""},{"path":"/reference/make_meta_expression.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Make meta file for cBioPortal expression data — make_meta_expression","text":"","code":"make_meta_expression( cancer_study_identifier, type = \"raw\", data_filename = glue::glue(\"data_expression_{type}.txt\"), publish_dir = \".\", write = TRUE, verbose = TRUE )"},{"path":"/reference/make_meta_genomic_generic.html","id":null,"dir":"Reference","previous_headings":"","what":"Generic template for genomic-type data file — make_meta_genomic_generic","title":"Generic template for genomic-type data file 
— make_meta_genomic_generic","text":"Adapted https://github.com/Sage-Bionetworks/genie-erbb2-cbio/blob/develop/make_meta.R#L65 Internal workhorse union properties used genomic-type data file – sensible defaults/specific combination passed higher-level fun, e.g. make_meta_maf.","code":""},{"path":"/reference/make_meta_genomic_generic.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Generic template for genomic-type data file — make_meta_genomic_generic","text":"","code":"make_meta_genomic_generic( cancer_study_identifier, genetic_alteration_type, datatype, stable_id = NULL, reference_genome_id = NULL, profile_name = NULL, profile_description = NULL, data_filename )"},{"path":"/reference/make_meta_genomic_generic.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Generic template for genomic-type data file — make_meta_genomic_generic","text":"cancer_study_identifier study identifier. genetic_alteration_type cBioPortal generic alteration type. datatype cBioPortal data type data_filename. stable_id Stable id. reference_genome_id Reference genome id, e.g. 'hg19'. profile_name Name genomic profiling. set specific make_meta utility. example, \"Mutations\" make_*_maf \"Copy-number alterations\" make_*_cna. profile_description Brief description genomic profiling. set specific make_meta utility. 
data_filename Name data file meta file describes.","code":""},{"path":"/reference/make_meta_maf.html","id":null,"dir":"Reference","previous_headings":"","what":"Make meta file for maf — make_meta_maf","title":"Make meta file for maf — make_meta_maf","text":"Reused https://github.com/Sage-Bionetworks/genie-erbb2-cbio/blob/develop/make_meta.R#L157","code":""},{"path":"/reference/make_meta_maf.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Make meta file for maf — make_meta_maf","text":"","code":"make_meta_maf( cancer_study_identifier, data_filename = \"data_mutations.txt\", publish_dir = \".\", write = TRUE, verbose = TRUE )"},{"path":"/reference/make_meta_maf.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Make meta file for maf — make_meta_maf","text":"cancer_study_identifier study identifier. data_filename Name data file. Defaults \"data_mutations.txt\". publish_dir Directory path write , defaults current. write Whether write meta file data file. verbose Report file written.","code":""},{"path":"/reference/make_meta_patient.html","id":null,"dir":"Reference","previous_headings":"","what":"Make patient meta file — make_meta_patient","title":"Make patient meta file — make_meta_patient","text":"Adapted https://github.com/Sage-Bionetworks/genie-erbb2-cbio/blob/develop/create_meta.R#L101","code":""},{"path":"/reference/make_meta_patient.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Make patient meta file — make_meta_patient","text":"","code":"make_meta_patient( cancer_study_identifier, data_filename = \"data_clinical_patient.txt\", write = TRUE, publish_dir = \".\", verbose = TRUE )"},{"path":"/reference/make_meta_patient.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Make patient meta file — make_meta_patient","text":"cancer_study_identifier study identifier. 
data_filename Name data file meta file describes. write Whether write meta file clinical data file. publish_dir Directory path write , defaults current. verbose Report file written.","code":""},{"path":"/reference/make_meta_sample.html","id":null,"dir":"Reference","previous_headings":"","what":"Make sample meta file — make_meta_sample","title":"Make sample meta file — make_meta_sample","text":"Adapted https://github.com/Sage-Bionetworks/genie-erbb2-cbio/blob/develop/create_meta.R#L109","code":""},{"path":"/reference/make_meta_sample.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Make sample meta file — make_meta_sample","text":"","code":"make_meta_sample( cancer_study_identifier, data_filename = \"data_clinical_sample.txt\", publish_dir = \".\", write = TRUE, verbose = TRUE )"},{"path":"/reference/make_meta_sample.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Make sample meta file — make_meta_sample","text":"cancer_study_identifier study identifier. data_filename Name data file meta file describes. publish_dir Directory path write , defaults current. write Whether write meta file clinical data file. 
verbose Report file written.","code":""},{"path":"/reference/make_meta_study_generic.html","id":null,"dir":"Reference","previous_headings":"","what":"Template for meta study file — make_meta_study_generic","title":"Template for meta study file — make_meta_study_generic","text":"Adapted https://github.com/Sage-Bionetworks/genie-erbb2-cbio/blob/develop/create_meta.R#L90 Low-level internal function tedious templating.","code":""},{"path":"/reference/make_meta_study_generic.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Template for meta study file — make_meta_study_generic","text":"","code":"make_meta_study_generic( cancer_study_identifier, type_of_cancer, name, description, citation = NULL, pmid = NULL, groups = NULL, short_name = NULL, add_global_case_list = TRUE )"},{"path":"/reference/make_public.html","id":null,"dir":"Reference","previous_headings":"","what":"Make public — make_public","title":"Make public — make_public","text":"Sets READ/DOWNLOAD permissions web registered users equivalently \"Make Public\" button Synapse UI. TODO: regular users can one--done action, DCC admin likely entails actions, updating project tracking table, wrapper \"callback\" functionality might needed.","code":""},{"path":"/reference/make_public.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Make public — make_public","text":"","code":"make_public(id)"},{"path":"/reference/make_public.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Make public — make_public","text":"id Synapse entity id.","code":""},{"path":"/reference/make_public_viewable.html","id":null,"dir":"Reference","previous_headings":"","what":"Set public access to VIEW (READ) only for an entity — make_public_viewable","title":"Set public access to VIEW (READ) only for an entity — make_public_viewable","text":"Set registered users non-registered users VIEW-permissions. 
See codelinkmake_public permissive permissions download (registered users), usually set later data release time.","code":""},{"path":"/reference/make_public_viewable.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Set public access to VIEW (READ) only for an entity — make_public_viewable","text":"","code":"make_public_viewable(id)"},{"path":"/reference/make_public_viewable.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Set public access to VIEW (READ) only for an entity — make_public_viewable","text":"id Synapse entity id.","code":""},{"path":"/reference/manifest_generate.html","id":null,"dir":"Reference","previous_headings":"","what":"Generate manifest via schematic service — manifest_generate","title":"Generate manifest via schematic service — manifest_generate","text":"See schematic manifest generation. Note uses access token user already logged syn_login.","code":""},{"path":"/reference/manifest_generate.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Generate manifest via schematic service — manifest_generate","text":"","code":"manifest_generate( data_type, dataset_id = NULL, title = data_type, schema_url = \"https://raw.githubusercontent.com/nf-osi/nf-metadata-dictionary/main/NF.jsonld\", asset_view = \"syn16858331\", output_format = \"google_sheet\", use_annotations = TRUE, service = \"https://schematic.api.sagebionetworks.org/v1/manifest/generate\" )"},{"path":"/reference/manifest_generate.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Generate manifest via schematic service — manifest_generate","text":"data_type Data type manifest generate (aka Component). dataset_id Optional, given fills manifest existing dataset instead generating blank manifest. title Optional, custom title. schema_url Optional, defaults main NF 'latest' data model. asset_view Optional, defaults main NF portal fileview. 
output_format Format 'excel', 'google_sheet', 'dataframe'. Defaults 'excel'. use_annotations Use annotations filling manifest existing dataset. Defaults TRUE NF. service Service endpoint use. Defaults schematic production endpoint.","code":""},{"path":"/reference/manifest_generate.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Generate manifest via schematic service — manifest_generate","text":"excel, path local file; google_sheet, URL sheet; dataframe, JSON string data.","code":""},{"path":"/reference/manifest_passed.html","id":null,"dir":"Reference","previous_headings":"","what":"Provide a pass/fail summary result — manifest_passed","title":"Provide a pass/fail summary result — manifest_passed","text":"Provide pass/fail summary result","code":""},{"path":"/reference/manifest_passed.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Provide a pass/fail summary result — manifest_passed","text":"","code":"manifest_passed(result)"},{"path":"/reference/manifest_passed.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Provide a pass/fail summary result — manifest_passed","text":"result Result list data schematic service.","code":""},{"path":"/reference/manifest_passed.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Provide a pass/fail summary result — manifest_passed","text":"Boolean whether passed. List structure list(result = result, notes = notes), result indicates whether dataset passed.","code":""},{"path":"/reference/manifest_validate.html","id":null,"dir":"Reference","previous_headings":"","what":"Validate manifest via schematic service — manifest_validate","title":"Validate manifest via schematic service — manifest_validate","text":"See schematic validation. Get validation results schematic service. 
Downstream utils can consume results custom display/report.","code":""},{"path":"/reference/manifest_validate.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Validate manifest via schematic service — manifest_validate","text":"","code":"manifest_validate( data_type, json_str = NULL, file_name = NULL, restrict_rules = FALSE, schema_url = \"https://raw.githubusercontent.com/nf-osi/nf-metadata-dictionary/main/NF.jsonld\", service = \"https://schematic.api.sagebionetworks.org/v1/model/validate\" )"},{"path":"/reference/manifest_validate.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Validate manifest via schematic service — manifest_validate","text":"data_type Data type manifest generate (aka Component). json_str JSON string representing metadata. file_name Path file, .csv. Ignored json_str given. restrict_rules Use basic schematic validation instead extended validation Great Expectations, default FALSE. schema_url Optional, defaults main NF 'latest' data model. service Service endpoint use. 
Defaults schematic production endpoint.","code":""},{"path":"/reference/manifest_validate_wrapper.html","id":null,"dir":"Reference","previous_headings":"","what":"Validate with stated data_type in manifest — manifest_validate_wrapper","title":"Validate with stated data_type in manifest — manifest_validate_wrapper","text":"Validate stated data_type manifest","code":""},{"path":"/reference/manifest_validate_wrapper.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Validate with stated data_type in manifest — manifest_validate_wrapper","text":"","code":"manifest_validate_wrapper( csv_file, data_type = NULL, dataset_id = NULL, dataset_name = NULL )"},{"path":"/reference/manifest_validate_wrapper.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Validate with stated data_type in manifest — manifest_validate_wrapper","text":"csv_file Path manifest csv file. data_type Optional present manifest. dataset_id Optional dataset id. dataset_name Optional dataset name.","code":""},{"path":"/reference/map_reports_sarek.html","id":null,"dir":"Reference","previous_headings":"","what":"Map out Sarek report files — map_reports_sarek","title":"Map out Sarek report files — map_reports_sarek","text":"family helper funs annotate secondary report files certain nextflow workflows. Sarek, many report files conveniently outputted top-level \"Reports\" folder, organize reports sample tool (BFCTools, FastQC, etc.). example reference starting sample-level: https://www.synapse.org/#!Synapse:syn31665258 things \"Reports\" can indeed generally called \"workflow report\" (subclass \"report\" resource), files bamQC reports directory misc web assets (.css, .js, .gif, etc.) used HTML report. HTML reports asset files directly embedded/bundled .html file, misc files become extra annotation burden. 
Since debatable call something like .css file report, files classified instead \"report asset\".","code":""},{"path":"/reference/map_reports_sarek.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Map out Sarek report files — map_reports_sarek","text":"","code":"map_reports_sarek(syn_out, project)"},{"path":"/reference/map_reports_sarek.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Map out Sarek report files — map_reports_sarek","text":"syn_out Reports output folder set scope fileview. project Project put fileview.","code":""},{"path":"/reference/map_reports_sarek.html","id":"details","dir":"Reference","previous_headings":"","what":"Details","title":"Map out Sarek report files — map_reports_sarek","text":"Unlike map_* functions, requires fileview instead using walk create one.","code":""},{"path":"/reference/map_sample_input_ss.html","id":null,"dir":"Reference","previous_headings":"","what":"Parse nextflow samplesheet for sample inputs — map_sample_input_ss","title":"Parse nextflow samplesheet for sample inputs — map_sample_input_ss","text":"Samplesheets used rnaseq pipelines, defined : https://nf-co.re/rnaseq/usage#full-samplesheet. pipeline run, found output folder called pipeline_info. simple helper get mapping sample ids input files (either one--many one--one) table.","code":""},{"path":"/reference/map_sample_input_ss.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Parse nextflow samplesheet for sample inputs — map_sample_input_ss","text":"","code":"map_sample_input_ss( samplesheet, parse_fun = function(x) gsub(\"_T[0-9]$\", \"\", x) )"},{"path":"/reference/map_sample_input_ss.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Parse nextflow samplesheet for sample inputs — map_sample_input_ss","text":"samplesheet local file syn id samplesheet. 
parse_fun Function implementing parse samples samplesheet.","code":""},{"path":"/reference/map_sample_io.html","id":null,"dir":"Reference","previous_headings":"","what":"Map sample input-output — map_sample_io","title":"Map sample input-output — map_sample_io","text":"Wrapper map sample inputs outputs depending workflow type.","code":""},{"path":"/reference/map_sample_io.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Map sample input-output — map_sample_io","text":"","code":"map_sample_io(input, output)"},{"path":"/reference/map_sample_io.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Map sample input-output — map_sample_io","text":"input Data map_sample_input_ss. output Data map_sample_output_*.","code":""},{"path":"/reference/map_sample_io.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Map sample input-output — map_sample_io","text":"table includes sample level output_id output_name input_id.","code":""},{"path":"/reference/map_sample_output_rnaseq.html","id":null,"dir":"Reference","previous_headings":"","what":"Map sample to output from nf-rnaseq with path — map_sample_output_rnaseq","title":"Map sample to output from nf-rnaseq with path — map_sample_output_rnaseq","text":"See https://nf-co.re/rnaseq/docs/output/#pipeline-overview. workflow generates several types outputs can become (\"level 2\" \"level 3\") dataset products. 
(note: outputs may available depending workflow run indexed back).","code":""},{"path":"/reference/map_sample_output_rnaseq.html","id":"ref-usage","dir":"Reference","previous_headings":"","what":"Usage","title":"Map sample to output from nf-rnaseq with path — map_sample_output_rnaseq","text":"","code":"map_sample_output_rnaseq( syn_out, fileview, output = c(\"STAR and Salmon\", \"featureCounts\", \"SAMtools\") )"},{"path":"/reference/map_sample_output_rnaseq.html","id":"arguments","dir":"Reference","previous_headings":"","what":"Arguments","title":"Map sample to output from nf-rnaseq with path — map_sample_output_rnaseq","text":"syn_out Syn id variant calling output folder. fileview existing fileview use (usually project's local fileview) scopes outputs \"default\" columns (id, name, type, parentId, path, ...). See details. output output select annotate. Defaults output types unless limited election. \"STAR Salmon\" selects .sf files – typically considered main output. \"featureCounts\" selects relevant .txt files. \"SAMtools\" selects .bam/.bai indexed sorted SAMtools.","code":""},{"path":"/reference/map_sample_output_rnaseq.html","id":"value","dir":"Reference","previous_headings":"","what":"Value","title":"Map sample to output from nf-rnaseq with path — map_sample_output_rnaseq","text":"list data.tables columns output_name output_id sample workflow output type. attribute workflow=nf-rnaseq set returned list, elements attribute outputFrom set, e.g. outputFrom=SAMtools.","code":""},{"path":"/reference/map_sample_output_sarek.html","id":null,"dir":"Reference","previous_headings":"","what":"Map sample to output from nf-sarek — map_sample_output_sarek","title":"Map sample to output from nf-sarek — map_sample_output_sarek","text":"See https://nf-co.re/sarek. Similar map_sample_output_rnaseq Sarek outputs. Processed outputs seen variable organization, nested first sample caller VariantCalling/