# Copyright 2009-2013 Bernhard M. Wiedemann
# Copyright 2012-2021 SUSE LLC
# SPDX-License-Identifier: GPL-2.0-or-later

package needle;
use Mojo::Base -strict, -signatures;
use autodie ':all';

use Cwd 'cwd';
use File::Find;
use Mojo::File qw(path);
use Mojo::JSON 'decode_json';
use Cpanel::JSON::XS ();
use File::Basename;
use Try::Tiny;
require IPC::System::Simple;
use OpenQA::Benchmark::Stopwatch;
use OpenQA::Isotovideo::Utils 'checkout_git_refspec';

our %needles;
our %tags;
our $cleanuphandler;

my $needles_dir;
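
# A click point is valid if it is the string 'center' or a hash providing xpos and ypos.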
sub is_click_point_valid ($click_point) {
    return (ref $click_point eq 'HASH'
          && $click_point->{xpos}
          && $click_point->{ypos})
      || $click_point eq 'center';
}
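
# Construct a needle object from a JSON file (or an already decoded hash) located below
# the configured needle directory; returns an undefined value if the definition is unusable.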
sub new ($classname, $jsonfile) {
    die 'needles not initialized via needle::init() before needle constructor called' unless defined $needles_dir;

    my $json;
    if (ref $jsonfile eq 'HASH') {
        $json = $jsonfile;
        $jsonfile = $json->{file} || path($needles_dir, $json->{name} . '.json');
    }

    my $self = {};

    # locate the needle's JSON file within the needle directory
    # - This code initializes $self->{file} so it contains the path within the needle directory.
    # - $jsonfile is re-assigned to contain the absolute path to the JSON file.
    # - The needle must be within the needle directory.
    if (index($jsonfile, $needles_dir) == 0) {
        $self->{file} = substr($jsonfile, length($needles_dir) + 1);
    }
    elsif (-f path($needles_dir, $jsonfile)) {
        # JSON file path already relative
        $self->{file} = $jsonfile;
        $jsonfile = path($needles_dir, $jsonfile);
    }
    else {
        die "Needle $jsonfile is not under needle directory $needles_dir";
    }

    if (!$json) {
        try {
            $json = decode_json(path($jsonfile)->slurp);
        }
        catch {
            warn "broken json $jsonfile: $_";
        };
        return undef unless $json;
    }

    $self->{tags} = $json->{tags} || [];
    $self->{properties} = $json->{properties} || [];

    my $gotmatch;
    my $got_click_point;
    for my $area_from_json (@{$json->{area}}) {
        my $area = {};
        for my $tag (qw(xpos ypos width height)) {
            $area->{$tag} = $area_from_json->{$tag} || 0;
        }
        for my $tag (qw(processing_flags max_offset)) {
            $area->{$tag} = $area_from_json->{$tag} if $area_from_json->{$tag};
        }
        $area->{match} = $area_from_json->{match} if $area_from_json->{match};
        $area->{type} = $area_from_json->{type} || 'match';
        $area->{margin} = $area_from_json->{margin} || 50;
        if (my $click_point = $area_from_json->{click_point}) {
            if ($got_click_point) {
                warn "$jsonfile has more than one area with a click point\n";
                return;
            }
            if (!is_click_point_valid($click_point)) {
                warn "$jsonfile has an area with invalid click point\n";
                return;
            }
            $got_click_point = 1;
            $area->{click_point} = $click_point;
        }

        $gotmatch = 1 if $area->{type} =~ /match|ocr/;

        $self->{area} ||= [];
        push @{$self->{area}}, $area;
    }

    # one match area is mandatory; use low-precedence 'and' so the warning is emitted before returning
    warn "$jsonfile missing match area\n" and return unless $gotmatch;

    $self->{name} = basename($jsonfile, '.json');
    my $png = $self->{png} || $self->{name} . ".png";

    $self->{png} = path(dirname($jsonfile), $png)->to_string;
    warn "Can't find $self->{png}" and return unless -s $self->{png};
    $self = bless $self, $classname;
    $self->register();
    return $self;
}
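
# Write the needle definition to the given file as pretty-printed, canonical JSON.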
sub save ($self, $fn) {
    my @area;
    for my $area_from_json (@{$self->{area}}) {
        my $area = {};
        for my $tag (qw(xpos ypos width height max_offset processing_flags match type margin)) {
            $area->{$tag} = $area_from_json->{$tag} if defined $area_from_json->{$tag};
        }
        push @area, $area;
    }
    my $json = Cpanel::JSON::XS->new->pretty->utf8->canonical->encode(
        {
            tags => [sort(@{$self->{tags}})],
            area => \@area,
            properties => [@{$self->{properties}}],
        });
    open(my $fh, '>', $fn);
    print $fh $json;
    close $fh;
}
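
# Remove this needle from the global %tags index and record the reason it was unregistered.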
sub unregister ($self, $reason = undef) {
    for my $g (@{$self->{tags}}) {
        @{$tags{$g}} = grep { $_ != $self } @{$tags{$g}};
        delete $tags{$g} unless (@{$tags{$g}});
    }
    $self->{unregistered} //= $reason || 'unknown reason';
}
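
# Add this needle to the global %tags index, reporting duplicated tags via bmwqemu::diag.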
sub register ($self) {
    my %check_dups;
    for my $g (@{$self->{tags}}) {
        if ($check_dups{$g}) {
            bmwqemu::diag("$self->{name} contains $g twice");
            next;
        }
        $check_dups{$g} = 1;
        $tags{$g} ||= [];
        push(@{$tags{$g}}, $self);
    }
}
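
# Read the needle's PNG via tinycv, blank out 'exclude' areas and return the image together
# with its path; slow loads (more than 0.1 s) are logged and undef is returned on read errors.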
sub _load_image ($self, $image_path) {
    # read the PNG file, measuring the required time
    my $watch = OpenQA::Benchmark::Stopwatch->new();
    $watch->start();
    my $image = tinycv::read($image_path);
    $watch->stop();
    if ($watch->as_data()->{total_time} > 0.1) {
        bmwqemu::diag(sprintf("load of $image_path took %.2f seconds", $watch->as_data()->{total_time}));
    }
    return undef unless $image;

    # call replacerect for exclude areas
    for my $area (@{$self->{area}}) {
        next unless $area->{type} eq 'exclude';
        $image->replacerect($area->{xpos}, $area->{ypos}, $area->{width}, $area->{height});
    }

    return {
        image => $image,
        image_path => $image_path,
    };
}
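
# Cache of images already loaded from disk, keyed by PNG path; $image_cache_tick is a
# logical clock recording when each cache entry was last used.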
my %image_cache;
my $image_cache_tick = 0;
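
# Return this needle's image from the cache, loading and inserting it via _load_image() on a cache miss.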
sub _load_image_with_caching ($self) {
    # insert newly loaded image to cache or recycle previously cached image
    my $image_path = $self->{png};
    my $image_cache_item = $image_cache{$image_path};
    if (!$image_cache_item) {
        my $new_image_cache_item = $self->_load_image($image_path);
        return undef unless $new_image_cache_item;

        $image_cache_item = $image_cache{$image_path} = $new_image_cache_item;
    }

    $image_cache_item->{last_use} = ++$image_cache_tick;

    return $image_cache_item->{image};
}
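
# Shrink the image cache to at most $limit entries by dropping the least recently used
# images, rebasing last_use values and the cache tick so the counter cannot grow without bound.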
sub clean_image_cache ($limit) {
    # compute the number of images to delete
    my @cache_items = values %image_cache;
    my $cache_size = scalar @cache_items;
    my $to_delete = $cache_size - $limit;
    return unless $to_delete > 0 && $to_delete <= $cache_size;

    # sort the cache items by their last use (ascending)
    my @sorted_cache_items = sort { $a->{last_use} <=> $b->{last_use} } @cache_items;

    # determine the minimum last use to lower the cache tick (so it won't overflow)
    my $min_last_use = $to_delete == $cache_size ? $image_cache_tick : $sorted_cache_items[$to_delete]->{last_use};
    $image_cache_tick -= $min_last_use;

    my $index = -1;
    for my $image_cache_item (@sorted_cache_items) {
        if (++$index < $to_delete) {
            # delete cache items up to the number of items to delete
            delete $image_cache{$image_cache_item->{image_path}};
        }
        else {
            # adapt last_use of items to keep to new $image_cache_tick
            $image_cache_item->{last_use} -= $min_last_use;
        }
    }
}
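
# Return the needle image, or a cached crop of the given area if one is passed.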
sub get_image ($self, $area = undef) {
    my $image = $self->_load_image_with_caching;
    return undef unless $image;
    return $image unless $area;
    return $area->{img} //= $image->copyrect($area->{xpos}, $area->{ypos}, $area->{width}, $area->{height});
}
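
# Check whether this needle carries the given tag.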
sub has_tag ($self, $tag) {
    for my $t (@{$self->{tags}}) {
        return 1 if ($t eq $tag);
    }
    return 0;
}
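
# Check whether this needle has the given property; a property can be a plain string or a hash with a name.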
sub has_property ($self, $property_name) {
    return grep { ref($_) eq "HASH" ? $_->{name} eq $property_name : $_ eq $property_name } @{$self->{properties}};
}
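
# Return the value of the given property; for 'workaround' the bug reference encoded in the
# needle name (for example bsc#1234) is derived as a fallback.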
sub get_property_value ($self, $property_name) {
    for my $property (@{$self->{properties}}) {
        if (ref($property) eq "HASH") {
            return $property->{value} if ($property->{name} eq $property_name);
        }
    }
    if ($property_name eq "workaround") {
        if ($self->{name} =~ /\S+\-(bsc|poo|bnc|boo)(\d+)\-\S+/) {
            return $1 . "#" . $2;
        }
    }

    return undef;
}
} |
248
|
|
|
249
|
8
|
my %hash = map { $_ => $self->{$_} } qw(tags properties area file png unregistered name); |
|
8
|
|
|
8
|
|
|
8
|
|
250
|
8
|
return \%hash; |
|
8
|
|
251
|
15
|
} |
252
|
7
|
|
253
|
|
return unless (m/.json$/); |
254
|
|
my $needle = needle->new($File::Find::name); |
255
|
5
|
$needles{$needle->{name}} = $needle if $needle; |
256
|
3
|
} |
257
|
2
|
|
258
|
|
|
259
|
|
$needles_dir = $init_needles_dir; |
260
|
3
|
unless (-d $needles_dir) { |
261
|
|
die "Can't init needles from $needles_dir" if (path($needles_dir)->is_abs); |
262
|
|
$needles_dir = path($bmwqemu::vars{CASEDIR}, $needles_dir)->to_string; |
263
|
10
|
die "Can't init needles from $init_needles_dir; If one doesn't specify NEEDLES_DIR, the needles will be loaded from \$PRODUCTDIR/needles firstly or $needles_dir (\$CASEDIR + $init_needles_dir), check vars.json" unless -d $needles_dir; |
|
10
|
|
|
10
|
|
264
|
10
|
} |
|
70
|
|
265
|
10
|
$bmwqemu::vars{NEEDLES_GIT_HASH} = checkout_git_refspec($needles_dir => 'NEEDLES_GIT_REFSPEC'); |
266
|
|
|
267
|
|
%needles = (); |
268
|
441
|
%tags = (); |
|
441
|
|
269
|
441
|
bmwqemu::diag("init needles from $needles_dir"); |
270
|
137
|
find({no_chdir => 1, wanted => \&wanted_, follow => 1}, $needles_dir); |
271
|
137
|
bmwqemu::diag(sprintf("loaded %d needles", scalar keys %needles)); |
272
|
|
|
273
|
|
$cleanuphandler->() if $cleanuphandler; |
274
|
17
|
return $needles_dir; |
|
17
|
|
|
17
|
|
275
|
|
} |
276
|
20
|
|
|
20
|
|
|
20
|
|
277
|
20
|
|
278
|
20
|
|
279
|
2
|
my @wanted = split(/ /, $wanted); |
280
|
1
|
my $first_tag = shift @wanted; |
281
|
1
|
my $goods = $tags{$first_tag}; |
282
|
|
|
283
|
18
|
# go out early if there is nothing to do |
284
|
|
return $goods || [] unless $goods && @wanted; |
285
|
18
|
my @results; |
286
|
18
|
|
287
|
18
|
# now check that it contains all the other tags too |
288
|
18
|
NEEDLE: for my $n (@$goods) { |
289
|
18
|
for (@wanted) { |
290
|
|
next NEEDLE if !$n->has_tag($_); |
291
|
18
|
} |
292
|
18
|
print "adding ", $n->{name}, "\n"; |
293
|
|
push(@results, $n); |
294
|
|
} |
295
|
4
|
return \@results; |
|
4
|
|
|
4
|
|
296
|
|
} |
297
|
3
|
|
|
3
|
|
|
3
|
|
|
3
|
|
298
|
|
|
299
|
25
|
1; |
|
25
|
|
|
25
|
|