#!/usr/bin/env perl
# (removed web-scrape residue: "RSS Git Download Clone" / "Raw Blame History")
use XML::SAX::ParserFactory;
use XML::Validator::Schema;
use XML::Simple qw(:strict);
use Data::Printer;
use Config::Auto;
use Modern::Perl; # say
use Test::More;
use IO::All;

=pod

Tests the genomics_xml.pl script - generates dummy data in lims_test and provides
overrides for location/name of archive & prevents ftp transfer of file.

=cut

BEGIN { require 't/test-lib.pl' }

# Shared test fixtures supplied by t/test-lib.pl (database handle + app config):
my $dbix = get_dbix();
my $cfg  = get_config(); # p $cfg;
my $db   = 'lims_test';

# for validation:
my $xsd = 'genomics.xsd'; # current xsd - symlinked to most recent file
my $xsd_src  = $cfg->{path_to_app_root} . '/setup/schema/xsd/' . $xsd; # p $xsd_src;

create_genomics_data(); # drop & recreate tables/data

{ # execute genomics_xml.pl script:
    my $script = '/script/crons/hourly/genomics_xml.pl';
    my @args   = ( -d => $db, -t => 10 ); # override dbname, set duration 10 seconds
    # List-form system avoids the shell; check the exit status so a broken
    # script fails loudly here instead of causing confusing downstream
    # test failures when /tmp/${db}.xml is missing or stale.
    system( $cfg->{path_to_app_root} . $script, @args ) == 0
        or die "failed to run $script (exit status $?)";
}

# for validation tests: slurp the XML the cron script just wrote to /tmp:
my $xml_src = io("/tmp/${db}.xml")->slurp; # p $xml_src;

# unzip archive and extract to $xml (not using gzip any more):
# gunzip "/tmp/${db}.gz" => \my $xml or die "gunzip failed: $GunzipError\n"; # p $xml;

# 'uat' section of the cron's own settings file; used below as the base for
# the expected GeL participant_id (rare_disease + 1):
my $genomics_uat = do {
    my $cfg_file = $cfg->{path_to_app_root} . '/script/crons/lib/settings.cfg'; # p $cfg_file;
    Config::Auto::parse($cfg_file)->{genomics}->{uat};
}; # p $genomics_uat;

{ # compare src data and parsed xml hashrefs:
	my $ref = XMLin("/tmp/${db}.xml", ForceArray => 0, KeyAttr => []); # p $ref->{Record};
    # read Data::Dumper txt back into hashref format:
	my $src = do "/tmp/${db}.txt"; # p $src; # http://www.perlmonks.org/?node_id=304068

    # if Consent / Files exist, convert element in $ref to an arrayref to match $src.
    # With ForceArray => 0, XMLin collapses a single <file> element to a plain
    # scalar but leaves multiple <file> elements as an arrayref already - only
    # wrap when it isn't an arrayref yet, otherwise we'd double-nest it:
    if ( my $r = $ref->{Record}->{Consent}->{Files} ) { # p $ref;
        my $str = $r->{file}; # p $str;
        $ref->{Record}->{Consent}->{Files}->{file} = [ $str ]
            unless ref $str eq 'ARRAY';
    }
	is_deeply($src, $ref->{Record}, 'data structures identical');

    { # check Rare disease has expected participant/GeL ID:
        my $allocated_id = $ref->{Record}->{Demographics}->{participant_id};
        my $expected_id  = $genomics_uat->{rare_disease} + 1;
        is($allocated_id, $expected_id, 'expected GeL ID for rare disease type');
    }
}

# change some xml params and check validation failure:
{
    # each pair: element name => value that violates its declared xsd type
    my @bad_values = (
        [ edta1_fluidx_vol => 175.5             ], # decimal on xs:short
        [ family_id        => 'string'          ], # string on xs:int
        [ consent_date     => '2/4/2016'        ], # invalid date
        [ edta1_qc_date    => '2/4/2016 10.00'  ], # invalid datetime
        [ family_id        => 2_147_483_647 + 1 ], # exceed max value for xs:int (2147483647)
        [ volume_stored    => 32_767 + 1        ], # exceed max value for xs:short (32767)
        [ total_samples    => 127 + 1           ], # exceed max value for xs:byte (127)
    );
    validate( @$_ ) for @bad_values;
}

# delete temp files written by the cron script / earlier steps:
for my $ext (qw(txt xml)) {
    io("/tmp/${db}.${ext}")->unlink;
}

done_testing(9);

# Rebuild the lims_test fixture data from scratch: reference tables are copied
# from the `genomics` database, transactional tables are truncated and
# repopulated with one request's worth of test/result/storage rows, using the
# lab_test_id,result pairs in __DATA__.
sub create_genomics_data { # create new genomics data:
	my $dbh = $dbix->dbh;
	# drop lab_tests & lab_sections, re-create from genomics db:
	for ( qw/lab_tests lab_sections screens screen_category/ ) {
		$dbh->do(qq!TRUNCATE TABLE `$_`!) ;
		$dbh->do(qq!INSERT INTO `$_` SELECT * FROM `genomics`.`$_`!);
	}
	# NOTE(review): 'request_history' is truncated here but the inserts below
	# target 'request_lab_test_history' - confirm the intended table name.
	$dbh->do(qq!TRUNCATE TABLE `$_`!) for qw(request_lab_test_results
		storage_racks request_initial_screen request_history request_storage);

	{ # test/results:
		# __DATA__ lines are "lab_test_id,result"; chomp returns the number of
		# chars removed, so grep also drops any line with no trailing newline.
		my %results = map { split ',' } grep chomp $_, <DATA>; # p %results; exit;
		$dbix->insert('request_lab_test_results',
			{
				lab_test_id => $_,
				request_id  => 1,
				result      => $results{$_},
			}
		) for keys %results;
	}
	# allocate the 'Rare disease' screen to the single test request:
	$dbix->select('screens', 'id', { description => 'Rare disease' })->into(my $id);
	$dbix->insert('request_initial_screen', { request_id => 1, screen_id => $id });
	$dbix->insert('request_lab_test_history',
		{
			request_id => 1,
			user_id    => 1,
			action 	   => 'auto-set Consent given status to complete',
		}
	);
	$dbix->insert('request_lab_test_history',
		{ request_id => 1, action => 'screened', user_id => 1 });
	$dbix->insert('storage_racks', { plateId => 'SR12345' });
	# two stored vials for request 1, in rack SR12345 (positions C1/C2):
	for (1..2) {
		$dbix->insert('request_storage',
			{
				vial_location => 'C'.$_,
				part_number   => '123456789'.$_,
				request_id    => 1,
				vialId 		  => 'FR1122334'.$_,
				sample 		  => 'tissue',
				source 		  => 'PST',
				rack_id 	  => 1,
			}
		);
	}
	# delete 2nd request - gets included in query, only able to handle 1 request:
	$dbix->delete('requests', { id => 2});
}

# Substitute an invalid value into one element of the generated XML and assert
# that schema validation rejects it with the expected "Illegal value" error.
#   $param - element name to corrupt
#   $val   - invalid replacement value
sub validate {
    my ($param, $val) = @_;

    my $xml = $xml_src; # clone it so text substitution localised

    $xml =~ s!<$param>(.*)</$param>!<$param>$val</$param>!;
    my $result = validate_xml_against_xsd($xml); # warn $result;
    # \Q...\E: values like '175.5' or '2/4/2016 10.00' contain regex
    # metacharacters and must be matched literally.
    my $msg = join ' ', $param, 'rejected invalid value as expected';
    like ( $result, qr/Illegal value '\Q$val\E' in element <$param>/, $msg );
}

# Validate an XML string against the test's xsd ($xsd_src) via a SAX pipeline.
# A schema violation makes the parse die; the error text is returned to the
# caller (empty string when the document validates cleanly).
sub validate_xml_against_xsd {
    my ($xml) = @_;

    my $handler = XML::Validator::Schema->new(file => $xsd_src, debug => 0); # debug => 1 shows data structure
    my $sax     = XML::SAX::ParserFactory->parser(Handler => $handler);
    eval { $sax->parse_string($xml) };
    return $@;
}

__DATA__
1,yes
10,DNA FF Germline
15,2016-02-06
16,R1/R2
17,R2
18,no
19,yes
20,yes
21,yes
22,yes
23,yes
24,yes
25,yes
26,yes
27,yes
28,yes
29,yes
30,yes
31,no
32,3
33,124578
34,235689
35,258369
36,369147
37,147258
38,v2
39,2016-02-08 12:10:00
40,Agarose
41,test result value
42,GMC
43,852369
44,175
45,1254575
46,C5
55,789321
56,225
57,2016-02-06 17:55:26
58,2016-02-07 17:45:10
59,3
60,2016-02-02 18:00:41
61,2016-02-05 17:50:35
62,951753654
63,753951
64,PST
65,25813
66,H3
67,2016-02-04 15:30:30
68,interested
69,decline
70,123456789