Image for T-Beam is in good working shape; restructure the Perl data importer to handle the 44 columns using hashes rather than positional indexes.
This commit is contained in:
parent
e28ebe5b17
commit
d3043533ce
9 changed files with 174 additions and 41 deletions
|
|
@ -53,7 +53,7 @@ my $ENHANCED_HEADER = join ',', @ENHANCED_COLUMNS;
|
|||
|
||||
# Default importer settings; any of these may be overridden from the
# command line before the database handle is opened.
my %opt = (
    dbname => 'satellite_data',
    host   => 'ryzdesk',
    port   => 5432,
    schema => 'public',
);
|
||||
|
|
@ -99,32 +99,40 @@ exit 0;
|
|||
|
||||
sub import_file {
|
||||
my ($dbh, $file, $opt) = @_;
|
||||
#
|
||||
# get a fixed-length hash (fingerprint) so we do not accidentally
|
||||
# load the same file twice.
|
||||
#
|
||||
my $sha256 = "";
|
||||
my $blob;
|
||||
{
|
||||
open my $fh, '<:raw', $file or die "Cannot open $file: $!\n";
|
||||
local $/;
|
||||
$blob = <$fh>;
|
||||
close $fh;
|
||||
}
|
||||
|
||||
open my $fh, '<:raw', $file or die "Cannot open $file: $!\n";
|
||||
local $/;
|
||||
my $blob = <$fh>;
|
||||
close $fh;
|
||||
|
||||
my $sha256 = sha256_hex($blob // '');
|
||||
$sha256 = sha256_hex($blob // '');
|
||||
my $file_size = -s $file;
|
||||
|
||||
open my $in, '<:encoding(UTF-8)', $file or die "Cannot open $file: $!\n";
|
||||
|
||||
|
||||
my @header_lines;
|
||||
my $csv_header_line;
|
||||
my @data_lines;
|
||||
|
||||
my $line_count = 0;
|
||||
while (my $line = <$in>) {
|
||||
chomp $line;
|
||||
$line =~ s/\r\z//;
|
||||
$line =~ s/\r//;
|
||||
|
||||
next if $line =~ /^\s*$/ && !@data_lines && !defined $csv_header_line && !@header_lines;
|
||||
|
||||
$line_count++;
|
||||
print "B Processing $line_count\n";
|
||||
if ($line =~ /^#/) {
|
||||
push @header_lines, $line;
|
||||
next;
|
||||
}
|
||||
|
||||
# record_type is the first entry in the column heading
|
||||
if (!defined $csv_header_line && $line =~ /^record_type,/) {
|
||||
$csv_header_line = $line;
|
||||
next;
|
||||
|
|
@ -172,8 +180,11 @@ sub import_file {
|
|||
sat_azimuth_deg sat_snr sat_used_in_solution
|
||||
);
|
||||
|
||||
my $col_count = 0;
|
||||
for my $col (@columns) {
|
||||
die "Unexpected column '$col' in $file\n" if !$allowed{$col};
|
||||
$col_count++;
|
||||
die "Unexpected column at column \# $col_count \"$col\" in $file\nHeader line: $csv_header_line\n"
|
||||
if !$allowed{$col};
|
||||
}
|
||||
|
||||
my $raw_header_text = join("\n", @header_lines);
|
||||
|
|
@ -229,11 +240,11 @@ SQL
|
|||
my ($first_ts, $last_ts, $board_id, $gnss_chip, $fw_name, $fw_ver, $boot_ts, $run_id);
|
||||
|
||||
$dbh->begin_work;
|
||||
|
||||
$line_count = 0; # reset
|
||||
for my $i (0 .. $#data_lines) {
|
||||
my $line = $data_lines[$i];
|
||||
next if $line =~ /^\s*$/;
|
||||
|
||||
$line_count++;
|
||||
$csv->parse($line) or die "CSV parse failed in $file line @{[$i+1]}: " . $csv->error_diag . "\n";
|
||||
my @fields = $csv->fields;
|
||||
|
||||
|
|
@ -270,9 +281,9 @@ SQL
|
|||
$line,
|
||||
);
|
||||
|
||||
$sth->execute(@values);
|
||||
$sth->execute(@values) or die "Line: $line_count ".$DBD::errstr;
|
||||
}
|
||||
|
||||
die "halted before commit, but after all rows processed";
|
||||
$dbh->commit;
|
||||
|
||||
my $update_sql = <<'SQL';
|
||||
|
|
@ -313,9 +324,11 @@ SQL
|
|||
|
||||
# Split a CSV header line into its column names, with surrounding
# whitespace stripped from each name. Dies if the line is not valid CSV.
#
# Arguments: the raw header line (string).
# Returns:   the list of trimmed column names.
sub parse_header_columns {
    my ($header_line) = @_;

    my $parser = Text::CSV_XS->new({ binary => 1, auto_diag => 1 });
    $parser->parse($header_line)
        or die "Cannot parse header line: " . $parser->error_diag . "\n";

    my @names = $parser->fields;

    # Trim leading/trailing whitespace in place (loop variable aliases
    # each element, so @names itself is modified).
    for my $name (@names) {
        $name =~ s/^\s+|\s+$//g;
    }

    return @names;
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue