Commit 39c0fe57 authored by Petar Horki

feat: merge of DataGroup branch changes

parent c7afff7b
importFrom("tibble", "tibble")
importFrom("tibble", "tribble")
importFrom("tibble", "as_tibble")
importFrom("tibble", "is_tibble")
importFrom("tibble", "has_name")
importFrom("tibble", "add_row")
importFrom("dplyr", "anti_join")
importFrom("dplyr", "bind_rows")
importFrom("dplyr", "bind_cols")
importFrom("dplyr", "distinct")
importFrom("dplyr", "filter")
importFrom("dplyr", "filter_")
importFrom("dplyr", "select")
importFrom("dplyr", "group_by_")
importFrom("dplyr", "mutate")
importFrom("dplyr", "bind_rows")
importFrom("dplyr", "bind_cols")
importFrom("dplyr", "row_number")
importFrom("dplyr", "distinct")
importFrom("dplyr", "anti_join")
importFrom("tidyr", "unite")
importFrom("tidyr", "unite_")
importFrom("tidyr", "nest")
importFrom("tidyr", "gather")
importFrom("tidyr", "gather_")
importFrom("purrr", "map")
importFrom("purrr", "pmap")
importFrom("purrr", "reduce")
importFrom("dplyr", "select")
importFrom("dplyr", "select_")
importFrom("dplyr", "ungroup")
importFrom("jsonlite", "toJSON")
importFrom("purrr", "%>%")
importFrom("purrr", "is_character")
importFrom("purrr", "is_logical")
importFrom("purrr", "is_atomic")
importFrom("purrr", "map")
importFrom("purrr", "pmap")
importFrom("purrr", "reduce")
importFrom("readr", "read_csv")
importFrom("readr", "cols")
importFrom("jsonlite", "toJSON")
importFrom("stringr", "str_split")
importFrom("tibble", "add_row")
importFrom("tibble", "as_tibble")
importFrom("tibble", "has_name")
importFrom("tibble", "is_tibble")
importFrom("tibble", "tibble")
importFrom("tibble", "tribble")
importFrom("tidyr", "gather")
importFrom("tidyr", "gather_")
importFrom("tidyr", "nest")
importFrom("tidyr", "separate_")
importFrom("tidyr", "unite")
importFrom("tidyr", "unite_")
exportPattern("^[[:alpha:]]+")
export(`_MapClass.Get`)
export(`_MapClass.Parse`)
......
This diff is collapsed.
default:
omop:
dbms: 'postgresql'
server: '127.0.0.1/OHDSI'
user: 'postgres'
server: '<SERVER_IP>/<DATABASE>'
user: '<USER>'
port: 5432
password: 'admin1'
schema: 'synpuf_cdm'
password: '<PASSWORD>'
schema: '<SCHEMA>'
opal:
baseURL: 'http://192.168.56.101:8080'
baseURL: 'https://<HOSTNAME>'
userName: 'administrator'
password: 'datashield_test&'
projectName: 'test'
password: '<PASSWORD>'
projectName: 'TestProject'
tableName: 'TestTable'
remoteDir: '/home/administrator/'
WebAPI:
baseURL: 'http://localhost:8080/WebAPI'
baseURL: 'http://localhost:8000/WebAPI'
{DEFAULT @limit = 0}
-- Condition occurrences recorded during visits that fall entirely inside a
-- cohort episode, restricted to the requested concept set (@value).
-- SqlRender template parameters: @resultsSchema, @cdmSchema,
-- @cohortDefinitionId, @value, @limit (0 = no limit).
WITH cohort_rows AS (
  -- Every episode belonging to the requested cohort definition.
  SELECT subject_id, cohort_start_date, cohort_end_date
  FROM @resultsSchema.cohort
  WHERE cohort_definition_id = @cohortDefinitionId
), in_window_visits AS (
  -- Visits fully contained within a cohort episode; DISTINCT collapses
  -- duplicates when a visit matches more than one overlapping episode.
  SELECT DISTINCT person_id, visit_occurrence_id
  FROM cohort_rows
  INNER JOIN
    @cdmSchema.visit_occurrence AS vo ON
      vo.person_id = cohort_rows.subject_id
      AND vo.visit_start_date >= cohort_rows.cohort_start_date
      AND vo.visit_end_date <= cohort_rows.cohort_end_date
)
-- NOTE(review): the final join links visits back to cohort rows on subject
-- only, so a subject with several cohort episodes may yield duplicate output
-- rows — presumably acceptable downstream; confirm against the caller.
SELECT
  cohort_rows.subject_id,
  in_window_visits.visit_occurrence_id,
  co.condition_occurrence_id,
  co.condition_concept_id
FROM cohort_rows
INNER JOIN
  in_window_visits ON
    in_window_visits.person_id = cohort_rows.subject_id
INNER JOIN
  @cdmSchema.condition_occurrence AS co ON
    co.visit_occurrence_id = in_window_visits.visit_occurrence_id
    AND co.condition_concept_id IN (@value)
{0 != @limit}?{ LIMIT @limit};
\ No newline at end of file
{DEFAULT @limit = 0}
-- Measurements (with numeric values) recorded during visits that fall
-- entirely inside a cohort episode, restricted to the requested concept
-- set (@value).
-- SqlRender template parameters: @resultsSchema, @cdmSchema,
-- @cohortDefinitionId, @value, @limit (0 = no limit).
WITH cohort_rows AS (
  -- Every episode belonging to the requested cohort definition.
  SELECT subject_id, cohort_start_date, cohort_end_date
  FROM @resultsSchema.cohort
  WHERE cohort_definition_id = @cohortDefinitionId
), in_window_visits AS (
  -- Visits fully contained within a cohort episode; DISTINCT collapses
  -- duplicates when a visit matches more than one overlapping episode.
  SELECT DISTINCT person_id, visit_occurrence_id
  FROM cohort_rows
  INNER JOIN
    @cdmSchema.visit_occurrence AS vo ON
      vo.person_id = cohort_rows.subject_id
      AND vo.visit_start_date >= cohort_rows.cohort_start_date
      AND vo.visit_end_date <= cohort_rows.cohort_end_date
)
-- NOTE(review): the final join links visits back to cohort rows on subject
-- only, so a subject with several cohort episodes may yield duplicate output
-- rows — presumably acceptable downstream; confirm against the caller.
SELECT
  cohort_rows.subject_id,
  in_window_visits.visit_occurrence_id,
  me.measurement_id,
  me.measurement_concept_id,
  me.value_as_number
FROM cohort_rows
INNER JOIN
  in_window_visits ON
    in_window_visits.person_id = cohort_rows.subject_id
INNER JOIN
  @cdmSchema.measurement AS me ON
    me.visit_occurrence_id = in_window_visits.visit_occurrence_id
    AND me.measurement_concept_id IN (@value)
{0 != @limit}?{ LIMIT @limit};
\ No newline at end of file
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment