diff --git a/exercises/18_GPS_Field_QA/scripts/import_satellite_logs.pl b/exercises/18_GPS_Field_QA/scripts/import_satellite_logs.pl index aa7fc1e..91bd36e 100644 --- a/exercises/18_GPS_Field_QA/scripts/import_satellite_logs.pl +++ b/exercises/18_GPS_Field_QA/scripts/import_satellite_logs.pl @@ -39,13 +39,14 @@ sat_used_in_solution ); my @ENHANCED_COLUMNS = qw( -record_type timestamp_utc board_id gnss_chip firmware_exercise_name firmware_version -boot_timestamp_utc run_id sample_seq ms_since_run_start fix_type fix_dimension -sats_in_view sat_seen sats_used hdop vdop pdop latitude longitude altitude_m -speed_mps course_deg pps_seen quality_class gps_count galileo_count glonass_count -beidou_count navic_count qzss_count sbas_count mean_cn0 max_cn0 age_of_fix_ms -ttff_ms longest_no_fix_ms sat_talker sat_constellation sat_prn sat_elevation_deg -sat_azimuth_deg sat_snr sat_used_in_solution +record_type timestamp_utc sample_seq ms_since_run_start board_id gnss_chip +firmware_exercise_name firmware_version boot_timestamp_utc run_id +fix_type fix_dimension sats_in_view sat_seen sats_used +hdop vdop pdop latitude longitude altitude_m speed_mps course_deg pps_seen +quality_class gps_count galileo_count glonass_count beidou_count navic_count +qzss_count sbas_count mean_cn0 max_cn0 age_of_fix_ms ttff_ms longest_no_fix_ms +sat_talker sat_constellation sat_prn sat_elevation_deg sat_azimuth_deg sat_snr +sat_used_in_solution ); my $LEGACY_HEADER = join ',', @LEGACY_COLUMNS; @@ -96,7 +97,14 @@ for my $file (@ARGV) { $dbh->disconnect; exit 0; +# +# ------------------------- subs ----------------------------- +# +# +# import_file first creates an entry in logs, gets an ID, then +# parses the data rows and inserts each row. +# sub import_file { my ($dbh, $file, $opt) = @_; # @@ -123,7 +131,7 @@ sub import_file { my $line_count = 0; while (my $line = <$in>) { chomp $line; - $line =~ s/\r//; + $line =~ s/\r//g; # there may be multiple \rs! 
next if $line =~ /^\s*$/ && !@data_lines && !defined $csv_header_line && !@header_lines; $line_count++; @@ -243,11 +251,25 @@ SQL $line_count = 0; # reset for my $i (0 .. $#data_lines) { my $line = $data_lines[$i]; - next if $line =~ /^\s*$/; + next if $line =~ /^\s*$/; # empty lines $line_count++; $csv->parse($line) or die "CSV parse failed in $file line @{[$i+1]}: " . $csv->error_diag . "\n"; my @fields = $csv->fields; - + # + # check for empty rows (possibly introduced during repair editing in LibreOffice Calc) + # + my $all_empty = 1; + for my $field (@fields) { + if (defined $field && $field ne '') { + $all_empty = 0; + last; + } + } + if ($all_empty){ + warn "Found empty row at $line_count and skipping"; + next; + } + if (@fields != @columns) { die sprintf( "Column mismatch in %s data row %d: got %d fields, expected %d\nLine: %s\n", @@ -279,13 +301,18 @@ SQL $i + 1, (map { $row{$_} } @columns), $line, - ); - - $sth->execute(@values) or die "Line: $line_count ".$DBD::errstr; + ); + eval { + $sth->execute(@values) or die "Line: $line_count ".$DBD::errstr; + }; + if ($@){ + print "[DEBUG ".__LINE__." ] i:$i\n"; + die "Killed as error was found: $@"; + } } - die "halted before commit, but after all rows processed"; + #die "halted before commit, but after all rows processed"; $dbh->commit; - + print "After commit of row data, about to update logs entry.\n"; my $update_sql = <<'SQL'; UPDATE logs SET board_id = ?,