Skip to content

Commit 7b70f5f

Browse files
committed
review feedback. simplify and clean up rvdss server code
1 parent 9368b63 commit 7b70f5f

File tree

3 files changed

+62
-70
lines changed

3 files changed

+62
-70
lines changed

src/client/delphi_epidata.py

Lines changed: 4 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -688,30 +688,25 @@ def rvdss(
688688
# Check parameters
689689
if None in (geo_type, time_values, geo_value):
690690
raise EpidataBadRequestException(
691-
"`geo_type`, `time_values`, and `geo_value` are all required"
691+
"`geo_type`, `geo_value`, and `time_values` are all required"
692692
)
693693

694694
# Set up request
695695
params = {
696-
"data_source": data_source,
697-
"signals": Epidata._list(signals),
698-
"time_type": time_type,
699-
"geo_type": geo_type,
700-
"time_values": Epidata._list(time_values),
701696
# Fake a time type param so that we can use some helper functions later.
702697
"time_type": "week",
698+
"geo_type": geo_type,
699+
"time_values": Epidata._list(time_values),
703700
}
704701

705702
if isinstance(geo_value, (list, tuple)):
706703
params["geo_values"] = ",".join(geo_value)
707704
else:
708-
params["geo_value"] = geo_value
705+
params["geo_values"] = geo_value
709706
if as_of is not None:
710707
params["as_of"] = as_of
711708
if issues is not None:
712709
params["issues"] = Epidata._list(issues)
713-
# if lag is not None:
714-
# params["lag"] = lag
715710

716711
# Make the API call
717712
return Epidata._request("rvdss", params)

src/server/_query.py

Lines changed: 10 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -483,17 +483,25 @@ def apply_issues_filter(self, history_table: str, issues: Optional[TimeValues])
483483
self.where_integers("issue", issues)
484484
return self
485485

486-
def apply_as_of_filter(self, history_table: str, as_of: Optional[int], use_source_signal: Optional[bool] = TRUE) -> "QueryBuilder":
486+
# Note: This function was originally only used by covidcast, so it assumed
487+
# that `source` and `signal` columns are always available. To make this usable
488+
# for rvdss, too, which doesn't have the `source` or `signal` columns, add the
489+
# `use_source_signal` flag to toggle inclusion of `source` and `signal` columns.
490+
#
491+
# `use_source_signal` defaults to True so if not passed, the function
492+
# retains the historical behavior.
493+
def apply_as_of_filter(self, history_table: str, as_of: Optional[int], use_source_signal: Optional[bool] = True) -> "QueryBuilder":
487494
if as_of is not None:
488495
self.retable(history_table)
489496
sub_condition_asof = "(issue <= :as_of)"
490497
self.params["as_of"] = as_of
491498

492499
alias = self.alias
493500
source_signal_plain = source_signal_alias = ""
501+
# If `use_source_signal` toggled on, append `source` and `signal` columns to group-by list and join list.
494502
if use_source_signal:
495503
source_signal_plain = ", `source`, `signal`"
496-
source_signal_alias = f"AND x.source = {alias}.source AND x.signal = {alias}.signal"
504+
source_signal_alias = f" AND x.source = {alias}.source AND x.signal = {alias}.signal"
497505

498506
sub_fields = f"max(issue) max_issue, time_type, time_value{source_signal_plain}, geo_type, geo_value"
499507
sub_group = f"time_type, time_value{source_signal_plain}, geo_type, geo_value"

src/server/endpoints/rvdss.py

Lines changed: 48 additions & 59 deletions
Original file line number | Diff line number | Diff line change
@@ -1,33 +1,27 @@
11
from flask import Blueprint, request
22

3-
from .._params import extract_integers, extract_strings, extract_date, extract_dates
3+
from .._params import (
4+
extract_date,
5+
extract_dates,
6+
extract_strings,
7+
parse_time_set,
8+
)
49
from .._query import execute_query, QueryBuilder
510
from .._validate import require_all
611

712
bp = Blueprint("rvdss", __name__)
813

914
db_table_name = "rvdss"
1015

11-
def parse_time_set_epiweeks() -> TimeSet:
12-
require_all(request, "epiweeks")
13-
time_values = extract_dates("epiweeks")
14-
if time_values == ["*"]:
15-
return TimeSet("week", True)
16-
return TimeSet("week", time_values)
17-
18-
return parse_time_arg()
19-
20-
2116
@bp.route("/", methods=("GET", "POST"))
2217
def handle():
23-
require_all(request, "epiweeks", "geo_type", "geo_values")
18+
require_all(request, "time_type", "time_values", "geo_type", "geo_values")
2419

25-
# epiweeks = extract_integers("epiweeks")
26-
time_set = parse_time_set_epiweeks()
27-
geo_sets = parse_geo_sets()
28-
# issues = extract_integers("issues") # TODO
20+
time_set = parse_time_set()
21+
geo_type = extract_strings("geo_type")
22+
geo_values = extract_strings("geo_values")
2923
issues = extract_dates("issues")
30-
# as_of = extract_date("as_of")
24+
as_of = extract_date("as_of")
3125

3226
# basic query info
3327
q = QueryBuilder(db_table_name, "rv")
@@ -36,73 +30,68 @@ def handle():
3630
"geo_type",
3731
"geo_value",
3832
"region",
33+
"time_type",
3934
]
4035
fields_int = [
4136
"epiweek",
37+
"time_value",
38+
"issue",
4239
"week",
4340
"weekorder",
4441
"year",
45-
"time_value",
46-
"issue",
4742
]
4843
fields_float = [
49-
"sarscov2_tests",
50-
"sarscov2_positive_tests",
51-
"sarscov2_pct_positive",
44+
"adv_pct_positive",
45+
"adv_positive_tests",
46+
"adv_tests",
47+
"evrv_pct_positive",
48+
"evrv_positive_tests",
49+
"evrv_tests",
50+
"flu_pct_positive",
51+
"flu_positive_tests",
5252
"flu_tests",
53+
"flua_pct_positive",
54+
"flua_positive_tests",
5355
"flua_tests",
54-
"flub_tests",
55-
"flu_positive_tests",
56-
"flu_pct_positive",
5756
"fluah1n1pdm09_positive_tests",
5857
"fluah3_positive_tests",
5958
"fluauns_positive_tests",
60-
"flua_positive_tests",
61-
"flua_pct_positive",
62-
"flub_positive_tests",
6359
"flub_pct_positive",
64-
"rsv_tests",
65-
"rsv_positive_tests",
66-
"rsv_pct_positive",
67-
"hpiv_tests",
60+
"flub_positive_tests",
61+
"flub_tests",
62+
"hcov_pct_positive",
63+
"hcov_positive_tests",
64+
"hcov_tests",
65+
"hmpv_pct_positive",
66+
"hmpv_positive_tests",
67+
"hmpv_tests",
6868
"hpiv1_positive_tests",
6969
"hpiv2_positive_tests",
7070
"hpiv3_positive_tests",
7171
"hpiv4_positive_tests",
72-
"hpivother_positive_tests",
73-
"hpiv_positive_tests",
7472
"hpiv_pct_positive",
75-
"adv_tests",
76-
"adv_positive_tests",
77-
"adv_pct_positive",
78-
"hmpv_tests",
79-
"hmpv_positive_tests",
80-
"hmpv_pct_positive",
81-
"evrv_tests",
82-
"evrv_positive_tests",
83-
"evrv_pct_positive",
84-
"hcov_tests",
85-
"hcov_positive_tests",
86-
"hcov_pct_positive",
73+
"hpiv_positive_tests",
74+
"hpiv_tests",
75+
"hpivother_positive_tests",
76+
"rsv_pct_positive",
77+
"rsv_positive_tests",
78+
"rsv_tests",
79+
"sarscov2_pct_positive",
80+
"sarscov2_positive_tests",
81+
"sarscov2_tests",
8782
]
8883

89-
q.set_sort_order("epiweek", "time_value" "geo_type", "geo_value", "issue")
84+
q.set_sort_order("epiweek", "time_value", "geo_type", "geo_value", "issue")
9085
q.set_fields(fields_string, fields_int, fields_float)
9186

92-
# q.where_strings("geo_type", geo_type)
93-
q.apply_geo_filters("geo_type", "geo_value", geo_sets)
94-
# TODO: can only use this if have a time_type field in data
95-
# q.apply_time_filter("time_type", "time_value", time_set)
96-
q.where_integers("epiweek", epiweeks)
87+
q.where_strings("geo_type", geo_type)
88+
# Only apply geo_values filter if wildcard "*" was NOT used.
89+
if not (len(geo_values) == 1 and geo_values[0] == "*"):
90+
q.where_strings("geo_value", geo_values)
9791

92+
q.apply_time_filter("time_type", "time_value", time_set)
9893
q.apply_issues_filter(db_table_name, issues)
99-
# TODO: can only use this if have a time_type field in data
100-
# q.apply_as_of_filter(db_table_name, as_of, use_source_signal = FALSE)
101-
102-
# if issues is not None:
103-
# q.where_integers("issue", issues)
104-
# else:
105-
# q.with_max_issue("epiweek", "geo_value")
94+
q.apply_as_of_filter(db_table_name, as_of, use_source_signal = False)
10695

10796
# send query
10897
return execute_query(str(q), q.params, fields_string, fields_int, fields_float)

0 commit comments

Comments (0)