diff --git a/exercises/18_GPS_Field_QA/scripts/import_satellite_logs.pl b/exercises/18_GPS_Field_QA/scripts/import_satellite_logs.pl
new file mode 100644
index 0000000..f832f3b
--- /dev/null
+++ b/exercises/18_GPS_Field_QA/scripts/import_satellite_logs.pl
@@ -0,0 +1,362 @@
#!/usr/bin/env perl
# 20260406 ChatGPT
# $Header$
#
# Example:
#   perl import_satellite_logs.pl \
#     --dbname satellite_data \
#     --host localhost \
#     --user jlpoole \
#     --schema public \
#     /path/to/20260406_175441_GUY.csv
#
# Notes:
#   * Imports one or more CSV files into tables logs and log_data.
#   * Preserves all leading hash-prefixed header lines in logs.raw_header_text.
#   * Uses the file's own CSV header row when present; otherwise falls back to
#     the expected project header defined in this script.

use strict;
use warnings;
use utf8;

use DBI;
use Digest::SHA qw(sha256_hex);
use File::Basename qw(basename);
use Getopt::Long qw(GetOptions);
use Text::CSV_XS;

# Fallback CSV column header used when a file carries no header row and no
# --header-line override was supplied.  Column order must match the
# log_data INSERT statement in import_file().
my $DEFAULT_HEADER = join ',', qw(
record_type timestamp_utc board_id gnss_chip firmware_exercise_name firmware_version
boot_timestamp_utc run_id fix_type fix_dimension sats_in_view sat_seen sats_used
hdop vdop pdop latitude longitude altitude_m speed_mps course_deg pps_seen
quality_class gps_count galileo_count glonass_count beidou_count navic_count
qzss_count sbas_count mean_cn0 max_cn0 age_of_fix_ms ttff_ms longest_no_fix_ms
sat_talker sat_constellation sat_prn sat_elevation_deg sat_azimuth_deg sat_snr
sat_used_in_solution
);

# Command-line options and their defaults.
my %opt = (
    dbname => 'satellite_data',
    host   => 'localhost',
    port   => 5432,
    schema => 'public',
);

GetOptions(
    'dbname=s'      => \$opt{dbname},
    'host=s'        => \$opt{host},
    'port=i'        => \$opt{port},
    'user=s'        => \$opt{user},
    'password=s'    => \$opt{password},
    'schema=s'      => \$opt{schema},
    'header-line=s' => \$opt{header_line},
    'notes=s'       => \$opt{import_notes},
    'help'          => \$opt{help},
) or die usage();

# --help is a successful invocation and prints to STDOUT.  A missing file
# list, however, is a usage error: report it on STDERR and exit non-zero.
# (Previously both cases printed to STDOUT and exited 0, so a caller or
# shell script could not detect the mistake.)
if ($opt{help}) {
    print usage();
    exit 0;
}
if (!@ARGV) {
    print STDERR usage();
    exit 2;
}

# Core modules used by the correctness fixes below (both ship with Perl).
use Encode qw(encode_utf8);
use Scalar::Util qw(looks_like_number);

# Build the DBD::Pg data-source name from the command-line options.
my $dsn = sprintf 'dbi:Pg:dbname=%s;host=%s;port=%d',
    $opt{dbname}, $opt{host}, $opt{port};

my %dbh_attr = (
    RaiseError     => 1,   # all DBI failures die; import_file() wraps them
    AutoCommit     => 1,   # import_file() opens explicit transactions
    PrintError     => 0,
    pg_enable_utf8 => 1,
);

my $dbh = DBI->connect($dsn, $opt{user}, $opt{password}, \%dbh_attr)
    or die "Unable to connect to PostgreSQL\n";

# Scope all unqualified table names to the requested schema.
$dbh->do(sprintf 'SET search_path TO %s', $dbh->quote_identifier($opt{schema}));

for my $file (@ARGV) {
    import_file($dbh, $file, \%opt);
}

$dbh->disconnect;
exit 0;

# usage: return the help text shown by --help and on usage errors.
sub usage {
    return <<'USAGE';
Usage:
  perl import_satellite_logs.pl [options] file1.csv [file2.csv ...]

Options:
  --dbname NAME        PostgreSQL database name. Default: satellite_data
  --host HOST          PostgreSQL host. Default: localhost
  --port PORT          PostgreSQL port. Default: 5432
  --user USER          PostgreSQL user name
  --password PASS      PostgreSQL password
  --schema NAME        Target schema. Default: public
  --header-line TEXT   Override the expected CSV header line when file lacks one
  --notes TEXT         Import notes stored in logs.import_notes
  --help               Show this help text
USAGE
}

# import_file: parse one CSV log file and load it into logs/log_data inside
# a single transaction.  Dies (after rolling back) on any failure, so a bad
# file never leaves a half-imported log or an open transaction behind.
sub import_file {
    my ($dbh, $file, $opt) = @_;

    open my $fh, '<:encoding(UTF-8)', $file
        or die "Unable to open $file: $!\n";

    my $file_text = do { local $/; <$fh> };
    close $fh;

    # Hash and measure the on-disk bytes, not the decoded characters.
    # sha256_hex() croaks on wide characters, and length() of decoded text
    # counts characters — both were wrong for any non-ASCII log file.
    my $file_bytes      = encode_utf8($file_text);
    my $sha256          = sha256_hex($file_bytes);
    my $file_size_bytes = length $file_bytes;

    # Split into lines; tolerate CRLF endings from files written on
    # FAT/Windows media by stripping the trailing carriage return.
    my @lines = split /\n/, $file_text, -1;
    s/\r\z// for @lines;

    my @comment_lines;   # leading '#' metadata lines, preserved verbatim
    my $header_line;     # CSV column header found in the file, if any
    my @data_lines;
    my $saw_header = 0;

    # Consume the leading comment block, optional blank lines, and the CSV
    # header row.  Everything from the first data row onward is data.
    while (@lines) {
        my $line = shift @lines;
        next if !defined $line;

        if ($line =~ /^#/) {
            push @comment_lines, $line;
            next;
        }

        if ($line =~ /^\s*$/ && !@data_lines && !$saw_header) {
            next;
        }

        if (!$saw_header && $line =~ /^record_type,/) {
            $header_line = $line;
            $saw_header = 1;
            next;
        }

        push @data_lines, $line;
        push @data_lines, @lines;
        last;
    }

    @data_lines = grep { defined $_ && $_ !~ /^\s*$/ } @data_lines;

    $header_line ||= $opt->{header_line} || $DEFAULT_HEADER;

    my $raw_header_text = @comment_lines ? join("\n", @comment_lines) . "\n" : undef;

    my $csv = Text::CSV_XS->new({
        binary              => 1,
        auto_diag           => 1,
        allow_loose_quotes  => 1,
        allow_loose_escapes => 1,
    });

    $csv->parse($header_line)
        or die "Unable to parse CSV header line for $file\n";
    my @header = $csv->fields;

    my %idx;   # column name => field position within a data row
    for my $i (0 .. $#header) {
        $idx{$header[$i]} = $i;
    }

    my @required = qw(record_type timestamp_utc board_id gnss_chip run_id);
    for my $name (@required) {
        die "Header is missing required column: $name\n" if !exists $idx{$name};
    }

    my $log_insert_sql = <<'SQL';
INSERT INTO logs (
    source_filename,
    source_path,
    file_sha256,
    file_size_bytes,
    raw_header_text,
    csv_header_line,
    import_notes
) VALUES (?, ?, ?, ?, ?, ?, ?)
RETURNING log_id
SQL

    my $data_insert_sql = <<'SQL';
INSERT INTO log_data (
    log_id, row_num, record_type, timestamp_utc, board_id, gnss_chip,
    firmware_exercise_name, firmware_version, boot_timestamp_utc, run_id,
    fix_type, fix_dimension, sats_in_view, sat_seen, sats_used,
    hdop, vdop, pdop, latitude, longitude, altitude_m, speed_mps, course_deg,
    pps_seen, quality_class, gps_count, galileo_count, glonass_count,
    beidou_count, navic_count, qzss_count, sbas_count, mean_cn0, max_cn0,
    age_of_fix_ms, ttff_ms, longest_no_fix_ms, sat_talker, sat_constellation,
    sat_prn, sat_elevation_deg, sat_azimuth_deg, sat_snr, sat_used_in_solution
) VALUES (
    ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
    ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
)
SQL

    # Summary values copied from the data rows; the first defined value wins
    # for the identity fields, the latest defined timestamp wins for last_ts.
    my ($row_count, $sample_count, $satellite_count) = (0, 0, 0);
    my ($first_ts, $last_ts);
    my ($board_id, $gnss_chip, $firmware_exercise_name, $firmware_version, $boot_ts, $run_id);
    my $log_id;

    $dbh->begin_work;

    # Wrap the transactional work so any RaiseError exception rolls back
    # instead of leaving an open transaction on the shared handle.
    eval {
        my $log_sth = $dbh->prepare($log_insert_sql);
        $log_sth->execute(
            basename($file),
            $file,
            $sha256,
            $file_size_bytes,
            $raw_header_text,
            $header_line,
            $opt->{import_notes},
        );
        ($log_id) = $log_sth->fetchrow_array;

        my $data_sth = $dbh->prepare($data_insert_sql);

      ROW:
        for my $line (@data_lines) {
            next ROW if $line =~ /^\s*$/;
            next ROW if $line =~ /^#/;   # stray comment line below the data

            $csv->parse($line)
                or die "CSV parse failure in $file after data row $row_count\n";
            my @f = $csv->fields;

            my %row;
            for my $name (@header) {
                my $value = $f[$idx{$name}];
                $row{$name} = normalize_value($value);
            }

            ++$row_count;
            ++$sample_count    if defined $row{record_type} && $row{record_type} eq 'sample';
            ++$satellite_count if defined $row{record_type} && $row{record_type} eq 'satellite';

            $first_ts //= $row{timestamp_utc};
            $last_ts = $row{timestamp_utc} if defined $row{timestamp_utc};

            $board_id               //= $row{board_id};
            $gnss_chip              //= $row{gnss_chip};
            $firmware_exercise_name //= $row{firmware_exercise_name};
            $firmware_version       //= $row{firmware_version};
            $boot_ts                //= $row{boot_timestamp_utc};
            $run_id                 //= $row{run_id};

            $data_sth->execute(
                $log_id,
                $row_count,
                $row{record_type},
                $row{timestamp_utc},
                $row{board_id},
                $row{gnss_chip},
                $row{firmware_exercise_name},
                $row{firmware_version},
                $row{boot_timestamp_utc},
                $row{run_id},
                $row{fix_type},
                to_int($row{fix_dimension}),
                to_int($row{sats_in_view}),
                to_int($row{sat_seen}),
                to_int($row{sats_used}),
                to_num($row{hdop}),
                to_num($row{vdop}),
                to_num($row{pdop}),
                to_num($row{latitude}),
                to_num($row{longitude}),
                to_num($row{altitude_m}),
                to_num($row{speed_mps}),
                to_num($row{course_deg}),
                to_bool($row{pps_seen}),
                $row{quality_class},
                to_int($row{gps_count}),
                to_int($row{galileo_count}),
                to_int($row{glonass_count}),
                to_int($row{beidou_count}),
                to_int($row{navic_count}),
                to_int($row{qzss_count}),
                to_int($row{sbas_count}),
                to_num($row{mean_cn0}),
                to_num($row{max_cn0}),
                to_int($row{age_of_fix_ms}),
                to_int($row{ttff_ms}),
                to_int($row{longest_no_fix_ms}),
                $row{sat_talker},
                $row{sat_constellation},
                to_int($row{sat_prn}),
                to_int($row{sat_elevation_deg}),
                to_int($row{sat_azimuth_deg}),
                to_num($row{sat_snr}),
                to_bool($row{sat_used_in_solution}),
            );
        }

        my $update_sql = <<'SQL';
UPDATE logs
   SET board_id = ?,
       gnss_chip = ?,
       firmware_exercise_name = ?,
       firmware_version = ?,
       boot_timestamp_utc = ?,
       run_id = ?,
       first_timestamp_utc = ?,
       last_timestamp_utc = ?,
       row_count = ?,
       sample_count = ?,
       satellite_count = ?
 WHERE log_id = ?
SQL

        my $update_sth = $dbh->prepare($update_sql);
        $update_sth->execute(
            $board_id,
            $gnss_chip,
            $firmware_exercise_name,
            $firmware_version,
            $boot_ts,
            $run_id,
            $first_ts,
            $last_ts,
            $row_count,
            $sample_count,
            $satellite_count,
            $log_id,
        );

        $dbh->commit;
        1;
    } or do {
        # Roll back so a failure in one file does not poison the import of
        # the remaining files on the same handle.
        my $err = $@ || 'unknown error';
        eval { $dbh->rollback };   # best effort; the connection may be dead
        die "Import of $file failed: $err";
    };

    print STDERR sprintf(
        "Imported %s => log_id=%d rows=%d samples=%d satellites=%d\n",
        $file, $log_id, $row_count, $sample_count, $satellite_count,
    );
}

# normalize_value: trim surrounding whitespace; empty strings become undef
# so they load into PostgreSQL as NULL.
sub normalize_value {
    my ($value) = @_;
    return undef if !defined $value;
    $value =~ s/^\s+//;
    $value =~ s/\s+$//;
    return undef if $value eq '';
    return $value;
}

# to_int: integer value, or undef (NULL) when the field is not numeric.
# The original called int() unconditionally, which warned on garbage input
# and silently stored 0 instead of NULL.
sub to_int {
    my ($value) = @_;
    return undef if !defined $value || !looks_like_number($value);
    return int($value);
}

# to_num: numeric value, or undef (NULL) when the field is not numeric.
sub to_num {
    my ($value) = @_;
    return undef if !defined $value || !looks_like_number($value);
    return $value + 0;
}

# to_bool: map common true/false spellings to 1/0; anything else is undef.
sub to_bool {
    my ($value) = @_;
    return undef if !defined $value;
    return 1 if $value =~ /^(?:1|true|t|yes|y)$/i;
    return 0 if $value =~ /^(?:0|false|f|no|n)$/i;
    return undef;
}
diff --git a/exercises/18_GPS_Field_QA/sql/satellite_data_schema.sql b/exercises/18_GPS_Field_QA/sql/satellite_data_schema.sql
new file mode 100644
index 0000000..69b888d
--- /dev/null
+++ b/exercises/18_GPS_Field_QA/sql/satellite_data_schema.sql
@@ -0,0 +1,293 @@
-- 20260406 ChatGPT
-- $Header$
--
-- Example:
--   createdb satellite_data
--   psql -d satellite_data -f satellite_data_schema.sql
--
-- Purpose:
--   Schema for importing GNSS field QA CSV logs generated by T-Beam units.
-- A log file is recorded in table logs, and each CSV row is stored in
-- table log_data with a foreign-key reference back to logs.

BEGIN;

-- Optional convenience note:
-- CREATE DATABASE satellite_data;

-- logs: one row per imported CSV file (provenance + summary counts).
-- The board_id/gnss_chip/firmware_* columns are denormalized copies of the
-- first values seen in the file's data rows, filled in by the importer.
CREATE TABLE IF NOT EXISTS logs (
    log_id bigserial PRIMARY KEY,
    source_filename text NOT NULL,
    source_path text,
    file_sha256 text,
    file_size_bytes bigint,
    raw_header_text text,
    csv_header_line text NOT NULL,
    imported_at timestamptz NOT NULL DEFAULT now(),
    import_notes text,

    board_id text,
    gnss_chip text,
    firmware_exercise_name text,
    firmware_version text,
    boot_timestamp_utc timestamptz,
    run_id text,
    first_timestamp_utc timestamptz,
    last_timestamp_utc timestamptz,

    row_count integer NOT NULL DEFAULT 0,
    sample_count integer NOT NULL DEFAULT 0,
    satellite_count integer NOT NULL DEFAULT 0,

    CONSTRAINT logs_source_filename_ck CHECK (btrim(source_filename) <> ''),
    CONSTRAINT logs_csv_header_line_ck CHECK (btrim(csv_header_line) <> ''),
    CONSTRAINT logs_row_count_ck CHECK (row_count >= 0),
    CONSTRAINT logs_sample_count_ck CHECK (sample_count >= 0),
    CONSTRAINT logs_satellite_count_ck CHECK (satellite_count >= 0)
);

COMMENT ON TABLE logs IS
'One row per imported CSV file. Stores file-level provenance, importer notes, raw hash-prefixed header text, the effective CSV column header line, and summary counts for the import.';

COMMENT ON COLUMN logs.log_id IS
'Surrogate primary key for one imported log file.';
COMMENT ON COLUMN logs.source_filename IS
'Base filename of the imported CSV file, such as 20260406_175441_GUY.csv.';
COMMENT ON COLUMN logs.source_path IS
'Full or relative filesystem path used at import time.';
COMMENT ON COLUMN logs.file_sha256 IS
'SHA-256 digest of the file contents for provenance and duplicate detection.';
COMMENT ON COLUMN logs.file_size_bytes IS
'File size in bytes at import time.';
COMMENT ON COLUMN logs.raw_header_text IS
'All leading lines in the source file whose first character is #. This is the free-form metadata header preserved exactly as found.';
COMMENT ON COLUMN logs.csv_header_line IS
'The effective CSV column header line used for import. This may come from the file itself or from the importer''s expected header when the file has no explicit header row.';
COMMENT ON COLUMN logs.imported_at IS
'UTC timestamp when the file was imported into PostgreSQL.';
COMMENT ON COLUMN logs.import_notes IS
'Optional notes about the import, schema assumptions, or anomalies observed during ingestion.';
COMMENT ON COLUMN logs.board_id IS
'Identifier of the T-Beam or other logger board, for example GUY, AMY, or CY, copied from the data rows when available.';
COMMENT ON COLUMN logs.gnss_chip IS
'GNSS receiver model reported by the firmware, such as MAX-M10S or L76K.';
COMMENT ON COLUMN logs.firmware_exercise_name IS
'Firmware exercise or program name that generated the log, useful for tracking logger behavior across exercises.';
COMMENT ON COLUMN logs.firmware_version IS
'Firmware version string or build identifier reported by the logger.';
COMMENT ON COLUMN logs.boot_timestamp_utc IS
'UTC timestamp that the device believed it booted. Useful for relating run timing back to power-up timing.';
COMMENT ON COLUMN logs.run_id IS
'Run/session identifier generated by the firmware for this data collection session.';
COMMENT ON COLUMN logs.first_timestamp_utc IS
'Earliest timestamp_utc found in the imported data rows.';
COMMENT ON COLUMN logs.last_timestamp_utc IS
'Latest timestamp_utc found in the imported data rows.';
COMMENT ON COLUMN logs.row_count IS
'Total number of imported data rows for this file, including both sample and satellite rows.';
COMMENT ON COLUMN logs.sample_count IS
'Count of rows whose record_type is sample.';
COMMENT ON COLUMN logs.satellite_count IS
'Count of rows whose record_type is satellite.';

-- Partial unique index: rejects a second import of byte-identical content
-- while still allowing rows whose digest was never recorded.
CREATE UNIQUE INDEX IF NOT EXISTS logs_source_sha256_uq
    ON logs (file_sha256)
    WHERE file_sha256 IS NOT NULL;

CREATE INDEX IF NOT EXISTS logs_run_id_idx
    ON logs (run_id);

CREATE INDEX IF NOT EXISTS logs_board_id_idx
    ON logs (board_id);

-- log_data: one row per CSV data record.  Sample rows and per-satellite
-- rows share this table; the sat_* columns are NULL on sample rows and the
-- epoch-summary columns are NULL on satellite rows (enforced by the
-- importer, not by constraints).
CREATE TABLE IF NOT EXISTS log_data (
    log_data_id bigserial PRIMARY KEY,
    log_id bigint NOT NULL REFERENCES logs(log_id) ON DELETE CASCADE,
    row_num integer NOT NULL,

    record_type text NOT NULL,
    timestamp_utc timestamptz,
    board_id text,
    gnss_chip text,
    firmware_exercise_name text,
    firmware_version text,
    boot_timestamp_utc timestamptz,
    run_id text,

    fix_type text,
    fix_dimension smallint,
    sats_in_view integer,
    sat_seen integer,
    sats_used integer,
    hdop numeric(8,3),
    vdop numeric(8,3),
    pdop numeric(8,3),
    latitude numeric(11,8),
    longitude numeric(11,8),
    altitude_m numeric(10,2),
    speed_mps numeric(10,3),
    course_deg numeric(7,3),
    pps_seen boolean,
    quality_class text,

    gps_count integer,
    galileo_count integer,
    glonass_count integer,
    beidou_count integer,
    navic_count integer,
    qzss_count integer,
    sbas_count integer,

    mean_cn0 numeric(8,3),
    max_cn0 numeric(8,3),
    age_of_fix_ms integer,
    ttff_ms integer,
    longest_no_fix_ms integer,

    sat_talker text,
    sat_constellation text,
    sat_prn integer,
    sat_elevation_deg integer,
    sat_azimuth_deg integer,
    sat_snr numeric(8,3),
    sat_used_in_solution boolean,

    CONSTRAINT log_data_log_id_row_num_uq UNIQUE (log_id, row_num),
    CONSTRAINT log_data_row_num_ck CHECK (row_num >= 1),
    CONSTRAINT log_data_record_type_ck CHECK (record_type IN ('sample', 'satellite')),
    CONSTRAINT log_data_fix_dimension_ck CHECK (fix_dimension IS NULL OR fix_dimension BETWEEN 0 AND 9),
    CONSTRAINT log_data_latitude_ck CHECK (latitude IS NULL OR latitude BETWEEN -90 AND 90),
    CONSTRAINT log_data_longitude_ck CHECK (longitude IS NULL OR longitude BETWEEN -180 AND 180),
    -- NOTE: AND binds tighter than OR in SQL, so this reads as intended:
    -- NULL, or a value in [0, 360).
    CONSTRAINT log_data_course_deg_ck CHECK (course_deg IS NULL OR course_deg >= 0 AND course_deg < 360),
    CONSTRAINT log_data_sat_elevation_deg_ck CHECK (sat_elevation_deg IS NULL OR sat_elevation_deg BETWEEN 0 AND 90),
    -- NOTE(review): azimuth is limited to 0..359 while course_deg allows
    -- any value below 360; confirm the firmware never reports azimuth 360.
    CONSTRAINT log_data_sat_azimuth_deg_ck CHECK (sat_azimuth_deg IS NULL OR sat_azimuth_deg BETWEEN 0 AND 359),
    -- coalesce(...) lets NULL (unknown) pass while rejecting negatives.
    CONSTRAINT log_data_sat_counts_nonnegative_ck CHECK (
        coalesce(sats_in_view, 0) >= 0 AND
        coalesce(sat_seen, 0) >= 0 AND
        coalesce(sats_used, 0) >= 0 AND
        coalesce(gps_count, 0) >= 0 AND
        coalesce(galileo_count, 0) >= 0 AND
        coalesce(glonass_count, 0) >= 0 AND
        coalesce(beidou_count, 0) >= 0 AND
        coalesce(navic_count, 0) >= 0 AND
        coalesce(qzss_count, 0) >= 0 AND
        coalesce(sbas_count, 0) >= 0
    ),
    CONSTRAINT log_data_ms_nonnegative_ck CHECK (
        coalesce(age_of_fix_ms, 0) >= 0 AND
        coalesce(ttff_ms, 0) >= 0 AND
        coalesce(longest_no_fix_ms, 0) >= 0
    )
);

COMMENT ON TABLE log_data IS
'One row per CSV data record. Stores both sample rows and per-satellite rows, preserving the file''s mixed row model in one typed table.';

COMMENT ON COLUMN log_data.log_data_id IS
'Surrogate primary key for one imported data row.';
COMMENT ON COLUMN log_data.log_id IS
'Foreign key back to logs.log_id identifying which source file this row came from.';
COMMENT ON COLUMN log_data.row_num IS
'1-based row number within the imported data file, excluding comment lines and any header line.';
COMMENT ON COLUMN log_data.record_type IS
'Kind of row: sample for one epoch summary row, or satellite for one satellite observed at that epoch.';
COMMENT ON COLUMN log_data.timestamp_utc IS
'UTC timestamp attached to the row by the GNSS logger.';
COMMENT ON COLUMN log_data.board_id IS
'Identifier of the data-collecting board, such as GUY, AMY, or CY.';
COMMENT ON COLUMN log_data.gnss_chip IS
'GNSS receiver model, such as MAX-M10S or L76K.';
COMMENT ON COLUMN log_data.firmware_exercise_name IS
'Firmware exercise/program name that emitted the row.';
COMMENT ON COLUMN log_data.firmware_version IS
'Firmware version/build identifier used when the row was logged.';
COMMENT ON COLUMN log_data.boot_timestamp_utc IS
'Device boot timestamp in UTC as reported by the logger.';
COMMENT ON COLUMN log_data.run_id IS
'Run/session identifier assigned by the firmware.';
COMMENT ON COLUMN log_data.fix_type IS
'GNSS fix classification such as NO_FIX, 2D, 3D, DGPS, or similar receiver-reported state.';
COMMENT ON COLUMN log_data.fix_dimension IS
'Numeric dimensionality of the solution. Typical values are 1 for no fix, 2 for horizontal-only, and 3 for full 3D.';
COMMENT ON COLUMN log_data.sats_in_view IS
'Receiver-reported total satellites in view at the epoch, if provided by the firmware.';
COMMENT ON COLUMN log_data.sat_seen IS
'Count of satellites actually emitted as satellite rows or otherwise counted by the logger at the epoch. May differ from sats_in_view depending on receiver API behavior.';
COMMENT ON COLUMN log_data.sats_used IS
'Number of satellites used by the receiver in the navigation solution at the epoch.';
COMMENT ON COLUMN log_data.hdop IS
'Horizontal Dilution of Precision. Lower values indicate better horizontal geometry.';
COMMENT ON COLUMN log_data.vdop IS
'Vertical Dilution of Precision. Lower values indicate better vertical geometry.';
COMMENT ON COLUMN log_data.pdop IS
'Position Dilution of Precision, a combined geometry indicator for 3D positioning.';
COMMENT ON COLUMN log_data.latitude IS
'Latitude in decimal degrees referenced to the receiver''s current navigation solution.';
COMMENT ON COLUMN log_data.longitude IS
'Longitude in decimal degrees referenced to the receiver''s current navigation solution.';
COMMENT ON COLUMN log_data.altitude_m IS
'Altitude in meters as reported by the GNSS receiver.';
COMMENT ON COLUMN log_data.speed_mps IS
'Ground speed in meters per second.';
COMMENT ON COLUMN log_data.course_deg IS
'Course over ground in degrees clockwise from true north.';
COMMENT ON COLUMN log_data.pps_seen IS
'Boolean indicator that a one-pulse-per-second timing event was observed for the epoch.';
COMMENT ON COLUMN log_data.quality_class IS
'Firmware-defined coarse quality label such as POOR, FAIR, GOOD, or similar.';
COMMENT ON COLUMN log_data.gps_count IS
'Number of GPS satellites counted at the epoch.';
COMMENT ON COLUMN log_data.galileo_count IS
'Number of Galileo satellites counted at the epoch.';
COMMENT ON COLUMN log_data.glonass_count IS
'Number of GLONASS satellites counted at the epoch.';
COMMENT ON COLUMN log_data.beidou_count IS
'Number of BeiDou satellites counted at the epoch.';
COMMENT ON COLUMN log_data.navic_count IS
'Number of NavIC/IRNSS satellites counted at the epoch.';
COMMENT ON COLUMN log_data.qzss_count IS
'Number of QZSS satellites counted at the epoch.';
COMMENT ON COLUMN log_data.sbas_count IS
'Number of SBAS satellites counted at the epoch.';
COMMENT ON COLUMN log_data.mean_cn0 IS
'Mean carrier-to-noise-density ratio, typically in dB-Hz, across the satellites considered by the firmware at the epoch.';
COMMENT ON COLUMN log_data.max_cn0 IS
'Maximum carrier-to-noise-density ratio, typically in dB-Hz, seen at the epoch.';
COMMENT ON COLUMN log_data.age_of_fix_ms IS
'Age in milliseconds of the current fix solution when logged.';
COMMENT ON COLUMN log_data.ttff_ms IS
'Time To First Fix in milliseconds for the run or receiver state being reported.';
COMMENT ON COLUMN log_data.longest_no_fix_ms IS
'Longest interval in milliseconds spent without a usable fix during the run so far.';
COMMENT ON COLUMN log_data.sat_talker IS
'Talker or source prefix for the satellite row, for example GP, GL, GA, GB, or similar receiver-provided code.';
COMMENT ON COLUMN log_data.sat_constellation IS
'Human-readable constellation name for the satellite row, such as GPS, GALILEO, GLONASS, or BEIDOU.';
COMMENT ON COLUMN log_data.sat_prn IS
'PRN or SVID number identifying the satellite within its constellation.';
COMMENT ON COLUMN log_data.sat_elevation_deg IS
'Satellite elevation angle in degrees above the horizon.';
COMMENT ON COLUMN log_data.sat_azimuth_deg IS
'Satellite azimuth angle in degrees clockwise from north.';
COMMENT ON COLUMN log_data.sat_snr IS
'Satellite signal-to-noise or similar quality metric as reported by the firmware, commonly in dB-Hz.';
COMMENT ON COLUMN log_data.sat_used_in_solution IS
'Boolean indicator that this satellite was used in the navigation solution for the epoch.';

-- Supports time-range queries within one imported file.
CREATE INDEX IF NOT EXISTS log_data_log_id_timestamp_idx
    ON log_data (log_id, timestamp_utc);

CREATE INDEX IF NOT EXISTS log_data_run_id_idx
    ON log_data (run_id);

CREATE INDEX IF NOT EXISTS log_data_board_id_idx
    ON log_data (board_id);

CREATE INDEX IF NOT EXISTS log_data_record_type_idx
    ON log_data (record_type);

-- Partial index for tracking one satellite over time across imports.
CREATE INDEX IF NOT EXISTS log_data_satellite_lookup_idx
    ON log_data (sat_constellation, sat_prn, timestamp_utc)
    WHERE record_type = 'satellite';

COMMIT;