--- comics/fetch.pl.new	2018/04/22 14:03:54	1.26
+++ comics/fetch.pl.new	2022/10/04 12:02:03	1.30
@@ -2,6 +2,18 @@
 ###############################################################################
 # $Log: fetch.pl.new,v $
+# Revision 1.30  2022/10/04 12:02:03  nick
+# Added --no-check-certificate for wget calls as arcamax was failing its cert check. Meh, whatever. It's just comics.
+#
+# Revision 1.29  2020/06/10 21:32:52  nick
+# Centered page
+#
+# Revision 1.28  2020/06/10 21:14:31  nick
+# Updated for w3 validation.
+#
+# Revision 1.27  2019/04/15 12:50:23  nick
+# The script was unable to handle html '&amp;' and convert it, so I added that. I probably should see if there's a library or something that handles all those automagically but I just tossed a regex in there for now that does the trick.
+#
 # Revision 1.26  2018/04/22 14:03:54  nick
 # Changed the default for Sunday comics that was causing issues with some
 # comics.
@@ -55,11 +67,12 @@ use Pod::Usage;
 use Getopt::Long;
 use JSON::Create 'create_json';
 use Date::Calc qw/Date_to_Text_Long Today Day_of_Week Day_of_Week_to_Text/;
+use Data::Dumper;
 
 ##
 ## Some default values
 ##
-my $ver = '$Id: fetch.pl.new,v 1.26 2018/04/22 14:03:54 nick Exp $';
+my $ver = '$Id: fetch.pl.new,v 1.30 2022/10/04 12:02:03 nick Exp $';
 my $comicFile = "comics.conf";
 my $comicConfigVer = "Unknown";
 my $reportFile = "/home/httpd/html/daily/comics/status_report.json";
@@ -117,8 +130,8 @@ foreach my $comic ( sort keys %comics ) 
     close(IMG);
 
-    system( "/usr/bin/convert -resize 640 $file $file" )
-        if ( $size > 640 )
+    system( "/usr/bin/convert -resize 800 $file $file" )
+        if ( $size > 800 )
 }
 
 ## &writeMainIndex ( \%dates );
@@ -211,7 +224,7 @@ sub writeStatusReportJSON ($$) {
     my $shortDate = sprintf("%d%02d%02d", (localtime)[5] + 1900,
                             (localtime)[4] + 1, (localtime)[3]);
 
-    my %json = ('date' => $shortDate, 'comics' => []);
+    my %json = ('date' => $shortDate, 'comics' => ());
     my $totalErrors = 0;
 
     foreach my $comic (sort keys %comics) {
@@ -220,13 +233,13 @@ sub writeStatusReportJSON ($$) {
             my %error = ('comicName' => "$comics{$comic}{'fullName'}",
                          'error'     => "$comics{$comic}{'error'}",
                          'status'    => "Error");
-            push $json{'comics'}, \%error;
+            push @{$json{'comics'}}, \%error;
             $totalErrors += 1;
         } else {
             my %status = ('comicName' => "$comics{$comic}{'fullName'}",
                           'error'     => 0,
                           'status'    => "Successfull");
-            push $json{'comics'}, \%status;
+            push @{$json{'comics'}}, \%status;
         }
     }
     $json{'totalErrors'} = $totalErrors;
@@ -244,6 +257,7 @@ sub writeComic ($$) {
     my $indexFile = $indexDir . "/index-" . $date->{'year2'} . $date->{'mon2'} .
                     $date->{'day2'} . "-" . $sd . ".html";
 
+    $comics->{$comic}{'fullName'} =~ s/&/&amp;/g;
     my $content = <<EOF;
 <!-- ******* $comic ($comics->{$comic}{'fullName'}) ******* -->
@@ -306,15 +320,13 @@ sub writeFooter {
     print INDEX <<EOF;
 </table>
-<br />
-Generated on: $sysDate<br />
-Version: $ver<br />
-Config Version: $comicConfigVer<br />
-CVS: http://demandred.dyndns.org/cgi-bin/cvsweb/comics/
-<br />
+Generated on: $sysDate<br />
+Version: $ver<br />
+Config Version: $comicConfigVer<br />
+CVS: http://demandred.dyndns.org/cgi-bin/cvsweb/comics/
+<br />
 <a href="http://validator.w3.org/check?uri=referer">Valid XHTML 1.0 Transitional</a>
-<br />
@@ -351,19 +363,15 @@ sub writeTitle ($$) {
-<html>
-<head>
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
 <title>Daily Comics for $today</title>
-</head>
-<body>
-<center>
-<table>
+</head>
+<body>
+<div style="text-align: center;">
+<table>
 <tr><th>Comic Page Heading</th></tr>
 <tr><td>$today_long</td></tr>
 EOF
 close (INDEX);
 }
@@ -380,7 +388,7 @@ sub directDownload ($$) {
     my $cDir  = $date->{'mon2'} . $date->{'year2'};
     my $cDate = $date->{'day2'};
 
-    my $cmd = "wget -q $file --referer=\"" . $comics->{$comic}{'url'} ."\" --user-agent=\"$USER_AGENT\" -O - | /usr/bin/convert - jpeg:images/$cDir/$comic-$cDate.jpg";
+    my $cmd = "wget --no-check-certificate -q $file --referer='" . $comics->{$comic}{'url'} ."' --user-agent=\"$USER_AGENT\" -O - | /usr/bin/convert - jpeg:images/$cDir/$comic-$cDate.jpg";
 
     return system($cmd);
 }
@@ -392,10 +400,19 @@ sub indexDownload ($$) {
     my ( @lines, $comicLine, $mainURL );
     my $comicIndex = "indexes/index.$comic";
 
-    my $wget_cmd = "wget -q --referer=\"$comics->{$comic}{'url'}\" " .
-                   "--user-agent=\"$USER_AGENT\" " .
+    print("Fetching Index $comicIndex.\n");
+    print("comic url: $comics->{$comic}{'url'}\n");
+
+    print Dumper($comics->{$comic});
+
+    my $wget_cmd = "wget --referer='$comics->{$comic}{'url'}' " .
+                   "--no-check-certificate --user-agent=\"$USER_AGENT\" " .
                    "$comics->{$comic}{'url'} -O $comicIndex";
-    system($wget_cmd);
+
+    print ("Using wget command:\n$wget_cmd\n");
+
+    my $status = system($wget_cmd);
+
+    print ("Return status: $status\n");
 
     if ( ! open FILEN, "<$comicIndex" ) {
         return "ERROR: Can't open index file for " . $comics->{$comic}{'fullName'} .
@@ -403,11 +420,12 @@ sub indexDownload ($$) {
     }
     while (<FILEN>) {
         my $line = $_;
-        $line =~ s/\R|\ \ +|\t//g if ( $comics->{$comic}{'remove_newliens'} );
+        $line =~ s/\R|\ \ +|\t//g if ( $comics->{$comic}{'remove_newlines'} );
         push @lines, $line;
    }
    close (FILEN);
 
+    unlink ("$comicIndex");
 
    $mainURL = $comics->{$comic}{'url'};
@@ -418,6 +436,7 @@ sub indexDownload ($$) {
     ##
     ## Find the comic strip URL based on the specified regex in the search
     ##
+
     foreach my $line (@lines) {
         if ( $line =~ m/$comics->{$comic}{'search'}/i ) {
             $comicLine = $1; chomp $comicLine;
@@ -433,7 +452,9 @@ sub indexDownload ($$) {
     if ( $comicLine ) {
         if ( $comicLine =~ m/(gif|jpg|png)/i ) { $comics->{$comic}{'ext'} = $1; }
         my $comicURL = ( $comicLine =~ m/http/ ) ? $comicLine : $mainURL . $comicLine;
-        my $cmd = "wget --user-agent=\"$USER_AGENT\" --referer=\"" . $comics->{$comic}{'url'} . "\" -q $comicURL -O images/$cDir/$comic-$cDate.$comics->{$comic}{'ext'}";
+        # Strip &amp;
+        $comicURL =~ s/\&amp\;/&/g;
+        my $cmd = "wget --no-check-certificate --user-agent=\"$USER_AGENT\" --referer='" . $comics->{$comic}{'url'} . "' -q '$comicURL' -O images/$cDir/$comic-$cDate.$comics->{$comic}{'ext'}";
         system( $cmd );
         return 0;
     }
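
Some notes on the changes above, with illustrative sketches that are not from the script itself. First, the resize bump from 640 to 800 pixels still interpolates the comic file name into a single shell command string. A minimal sketch of the same convert call in list form, which bypasses the shell entirely (the path and width below are hypothetical stand-ins):

use strict;
use warnings;

my $file = 'images/1022/example-04.jpg';   # hypothetical path
my $size = 1024;                           # hypothetical measured width

# List-form system() passes arguments straight to execvp(), so spaces or
# shell metacharacters in $file cannot split or alter the command.
if ( $size > 800 ) {
    system( '/usr/bin/convert', '-resize', '800', $file, $file ) == 0
        or warn "convert failed: exit status $?\n";
}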
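The push changes in writeStatusReportJSON track Perl itself: pushing onto a bare scalar (push $json{'comics'}, ...) was an experimental autodereference feature and became a fatal error in Perl 5.24, so the explicit @{...} dereference is required on modern Perls. One wrinkle worth noting: 'comics' => () flattens to nothing in list context, so the key only springs into existence when the first push autovivifies it. A small sketch with made-up data:

use strict;
use warnings;
use JSON::Create 'create_json';

# 'comics' => () vanishes in list context; seeding with [] as revision 1.26
# did keeps the key present (as an empty array) even when nothing is pushed.
my %json = ( 'date' => '20221004', 'comics' => [] );

# Explicit dereference works everywhere; the bare-scalar autoderef form
# was removed in Perl 5.24.
push @{ $json{'comics'} },
    { 'comicName' => 'Example Comic', 'error' => 0, 'status' => 'Successfull' };

print create_json( \%json ), "\n";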
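The revision 1.27 log entry wonders whether a library handles entity conversion "automagically": HTML::Entities, bundled with CPAN's HTML-Parser distribution, is the usual answer, and it could replace both hand-rolled regexes in writeComic and indexDownload. A sketch, with hypothetical values:

use strict;
use warnings;
use HTML::Entities qw(decode_entities encode_entities);

# Scraped URLs often arrive with '&amp;' where '&' is meant; decode_entities
# covers every named and numeric entity, not just the one the regex strips.
my $comicURL = 'https://example.com/strip?id=42&amp;fmt=gif';   # hypothetical
$comicURL = decode_entities($comicURL);    # -> ...?id=42&fmt=gif

# And the other direction for the generated XHTML, instead of s/&/&amp;/g:
my $fullName = 'Example & Friends';                             # hypothetical
my $safeName = encode_entities($fullName, '<>&"');              # -> 'Example &amp; Friends'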
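Finally, the --no-check-certificate flag and the single-quoting of $comicURL both exist because every fetch goes through a shell and wget. A hedged alternative sketch that fetches in-process with LWP::UserAgent instead (requires LWP::Protocol::https; all names below are stand-ins for the script's variables):

use strict;
use warnings;
use LWP::UserAgent;

my $referer  = 'https://www.arcamax.com/comics/example';        # hypothetical
my $comicURL = 'https://www.arcamax.com/images/example.gif';    # hypothetical
my $outFile  = 'images/1022/example-04.gif';                    # hypothetical

# ssl_opts is the in-process equivalent of wget --no-check-certificate:
# skip certificate and hostname verification for hosts with broken certs.
my $ua = LWP::UserAgent->new(
    agent    => 'Mozilla/5.0 (X11; Linux x86_64)',              # $USER_AGENT stand-in
    ssl_opts => { verify_hostname => 0, SSL_verify_mode => 0 },
);

my $res = $ua->get( $comicURL, 'Referer' => $referer );
die 'Fetch failed: ' . $res->status_line . "\n" unless $res->is_success;

# No shell involved, so $comicURL never needs quoting.
open my $img, '>', $outFile or die "Can't write $outFile: $!\n";
binmode $img;
print {$img} $res->content;
close $img;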