# Global Pick Database (detections the really easy way)

Author: Amanda M. Thomas (modified from code Yiyu Ni shared over Slack). Goal: a quick PickDB demonstration.
import datetime
import pandas as pd
# Pick-database endpoint and the one-day query window we want.
base_url = "https://dasway.ess.washington.edu/quakescope/service/picks/query"
starttime = "2022-12-20"
endtime = "2022-12-21"
# Parse the start date once; downstream cells reuse `dt` for the year.
dt = datetime.datetime.strptime(starttime, "%Y-%m-%d")
# %j is the day-of-year (001-366); cast away the zero padding.
julian_day = int(dt.strftime("%j"))
print(f"Julian Day: {julian_day}")
# Assemble the query string for station PB.B047; the API accepts bare
# ISO dates, so no quoting or URL-encoding is needed here.
query = f"tid=PB.B047.&start_time={starttime}&end_time={endtime}&limit=1000"
url = f"{base_url}?{query}"
# The service replies with pipe-delimited text, which pandas parses directly.
detections = pd.read_csv(url, delimiter="|")
print(detections.head())
Julian Day: 354
trace_id network_code station_code location_code channel \
0 PB.B047. PB B047 NaN EH
1 PB.B047. PB B047 NaN EH
2 PB.B047. PB B047 NaN EH
3 PB.B047. PB B047 NaN EH
4 PB.B047. PB B047 NaN EH
start_time peak_time end_time \
0 2022-12-20 00:09:53.650 2022-12-20 00:09:53.850 2022-12-20 00:09:54.230
1 2022-12-20 00:14:08.630 2022-12-20 00:14:08.800 2022-12-20 00:14:09.270
2 2022-12-20 00:26:58.150 2022-12-20 00:26:58.380 2022-12-20 00:26:58.610
3 2022-12-20 00:38:18.630 2022-12-20 00:38:18.750 2022-12-20 00:38:19.030
4 2022-12-20 00:38:58.890 2022-12-20 00:38:59.050 2022-12-20 00:38:59.050
confidence amplitude phase
0 0.300115 0.000021 P
1 0.261277 0.000014 P
2 0.311770 0.000016 P
3 0.223382 0.000018 P
4 0.340299 0.000020 P
# Derive the output name from the first pick's network/station plus the
# requested year and zero-padded Julian day.
first = detections.iloc[0]
output_file = (
    f"pickdb_detections_{first['network_code']}.{first['station_code']}"
    f".{dt.year}.{julian_day:03d}.csv"
)
! pwd
# Persist just the columns needed downstream (phase label, peak time,
# and detector confidence); drop the integer index on the way out.
columns_to_save = ['phase', 'peak_time', 'confidence']
detections[columns_to_save].to_csv(output_file, index=False)
/Users/amt/Documents/2025_ML_TSC/notebooks/Amanda