1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186
|
#!/usr/bin/perl
#
# Scrolls freshmeat headlines.
#
# By Joey Hess <joey@kitenet.net>, GPL copyright 1998.
# Configuration globals.  (Script predates use strict; everything here
# is a package variable shared with the subs below.)
$http_port = 80; # default port, used for direct connections and as proxy fallback
$host = 'freshmeat.net'; # server the headlines are fetched from
$http_file = '/backend/recentnews.txt'; # headline feed: records of title/date/url lines
$update_interval=30 * 60; # the page only updates every 2 hours
# now, maybe more often later, 30 minutes seems reasonable.
$ticker_update_interval=1; # tell ticker to update as often as possible.
$scrollstring="Getting FreshMeat, please wait.."; # placeholder shown until the first fetch finishes
$size=10000; # max buffer size (bytes of shared memory shared with ticker).
# Socket provides sockaddr_in/PF_INET etc. for the raw http fetch;
# Getopt::Long parses the command line.
use Socket;
use Getopt::Long;
BEGIN {
# Don't depend on Date.pm, but it's nice to have.
# String eval so a missing Date::Parse sets $@ instead of aborting
# compilation; $no_date_parse makes --age refuse to run later.
eval "use Date::Parse";
if ($@) {
$no_date_parse=1;
}
}
# Command-line options: --help prints usage via callback, --max limits
# how many headlines are shown, --age (minutes) filters out old articles.
GetOptions(
    "help|h", \&usage,
    "max|m=i", \$max,
    "age|a=i", \$age,
) || usage();
# --age needs str2time from Date::Parse, which may not be installed.
# (The original compared "$age ne undef", a string comparison against
# the empty string that warns under warnings; test definedness instead.)
if ($no_date_parse && defined $age) {
    die "Date.pm not found, cannot parse dates."
}
if (defined $max && $max < 1) {
    die "max must be positive";
}
# Parse proxy settings from the conventional http_proxy environment
# variable, e.g. http://proxy.example.com:8080/.  The port is optional
# and falls back to the standard http port; anything unparseable is a
# fatal configuration error.
if ($ENV{http_proxy}) {
    if ($ENV{http_proxy} =~ m#^http://([^:/]+)(?::(\d+))?#) {
        $proxy_host = $1;
        $proxy_port = defined $2 ? $2 : $http_port;
        $proxy = 1;
    }
    else {
        die "Invalid http proxy settings, cannot parse \"$ENV{http_proxy}\"";
    }
}
# On interrupt/quit, run sub quit below (string handler name is
# resolved at signal time) so the shared memory segment is removed.
$SIG{'INT'} = 'quit';
$SIG{'QUIT'} = 'quit';
# Use shared memory to communicate with the ticker program.
# Hardcoded SysV IPC constants: IPC_PRIVATE is 0 everywhere, IPC_RMID
# is 0 on Linux.  NOTE(review): not portable -- IPC::SysV would supply
# the real values; verify before running on other unices.
$IPC_PRIVATE = 0;
$IPC_RMID = 0;
$key=shmget($IPC_PRIVATE, $size , 0600 ) || die $!;
shmwrite($key, $scrollstring, 0, $size ) || die $!;
# Fork the ticker program off.  The child execs ticker with the shm id,
# segment size, and refresh rate; any leftover command-line args are
# passed through to it.  If exec fails, SIGQUIT (3) the parent so it
# cleans up the segment via quit(), then die in the child.
if (!($newpid=fork)) {
exec "ticker","-s$key","-S$size","-bgreen","-c$ticker_update_interval",@ARGV;
kill 3,getppid();
die "Cannot execute ticker program: $! - is it in the PATH?";
}
# Main loop: fetch the headlines, publish the scroll text into the
# shared memory segment for the ticker to display, then sleep until
# the next update.  Runs until a signal triggers quit().
while (1) {
$scrollstring="";
$ss=get_freshmeat();
$scrollstring.="Freshmeat news -- $ss" if $ss;
if ($scrollstring) {
chop $scrollstring; # trailing space.
}
else {
# Empty feed, or --age/--max filtered everything out.
$scrollstring="No news is good news.."
}
shmwrite($key, "$scrollstring", 0, $size ) || die $!;
sleep $update_interval;
}
# Connect the global filehandle S to a TCP socket on $host:$port, and
# set both S and STDOUT to autoflush.  Dies with a diagnostic if the
# host cannot be resolved, the socket cannot be created, or the
# connection fails.  (The original checked none of these and would
# silently carry on with a dead filehandle.)
sub connectt {
    my ($host, $port) = @_;

    my $ipaddr = gethostbyname($host)
        or die "Cannot resolve $host: $!";
    my $sockaddr = sockaddr_in($port, $ipaddr);
    socket(S, PF_INET, SOCK_STREAM, getprotobyname('tcp'))
        or die "Cannot create socket: $!";
    connect(S, $sockaddr)
        or die "Cannot connect to $host:$port: $!";
    # Unbuffer the socket so the http request goes out immediately,
    # then restore STDOUT as the selected handle (also unbuffered).
    select S;
    $| = 1;
    select STDOUT;
    $| = 1;
}
# Set up filehandle S to get an url.
# Handles reading and ignoring the http headers the server returns, so
# the next read from S after this function starts at the top of the page.
# Speaks http/1.0, and uses proxy servers if $ENV{http_proxy} is set.
sub http_get {
    my ($http_host, $file) = @_;

    # Connect either to the proxy or directly to the target host.
    connectt(($proxy_host ? $proxy_host : $http_host),
             ($proxy_port ? $proxy_port : $http_port));
    if (!$proxy) {
        print S "GET $file HTTP/1.0\r\nHost: $http_host\r\nUser-agent: freshmeat-ticker\r\n\r\n";
    }
    else {
        # Proxies need the absolute URL.  $file already begins with a
        # slash, so don't insert another one (the original produced
        # http://host//path).
        print S "GET http://$http_host$file HTTP/1.0\r\nHost: $http_host\r\nUser-agent: freshmeat-ticker\r\n\r\n";
    }
    # Read and discard the response headers, which end at the first
    # empty line.
    $/ = "\r\n"; # for http headers
    while (<S>) {
        chomp;
        last unless $_;
    }
    $/ = "\n"; # for body
}
# Signal handler / cleanup: mark the shared memory segment for deletion
# (it disappears once the ticker detaches), then exit.
sub quit {
    shmctl($key, $IPC_RMID, 0) or die $!;
    exit;
}
# Print a usage summary and exit unsuccessfully.  Also installed as the
# Getopt::Long callback for --help/-h.
sub usage {
    my $help = <<__eof__;
Usage: $0 [options] -- [ticker options]
ticker options options to pass on to the ticker program
-h, --help display this help
-m num, --max=num display this many messages maximum
-a min, --age=min display only articles that are less than this many
minutes old.
__eof__
    print $help;
    exit 1;
}
# Return true if the passed article date (any format str2time accepts)
# is more than $age minutes in the past; false (undef) when --age was
# not given.  (The original ignored its argument and read the global
# $date, and used the "$age ne undef" string-comparison idiom; it now
# honors the parameter its caller already passes.)
sub too_old {
    my ($when) = @_;

    return undef unless defined $age;
    # str2time returns epoch seconds; undef on parse failure, which the
    # numeric comparison treats as 0 (i.e. very old) -- same as before.
    my $then = str2time($when);
    return 1 if (time() - $then) > ($age * 60);
    return undef;
}
# It's important this function only accept ($), or it will slurp in <S>
# below.
# Remove every <...> html tag from the passed string and return the
# result.  The substitution drops the tag outright; the original
# replaced it with $1, which no capture group ever set and so warned
# about an uninitialized value.  Using a lexical also stops the sub
# from clobbering the caller's $_.
sub strip_html ($) {
    my $text = shift;
    $text =~ s/<[^>]*>//g;
    return $text;
}
# Fetch the freshmeat headline feed and return the titles joined with
# " -- " separators (with one trailing separator, trimmed by the
# caller), or undef if nothing qualified.  The feed is records of three
# lines -- title, date, url -- newest first.  Honors --max (stop after
# that many titles) and --age (stop at the first too-old article, since
# everything after it is older still).
sub get_freshmeat {
    my $ss = undef;

    http_get($host, $http_file);
    my $count = 0;
    while (<S>) {
        chomp;
        my $title = strip_html($_);
        # $date stays a package global: too_old() historically read it
        # directly as well as taking it as an argument.
        $date = <S>;
        last unless defined $date;  # truncated feed: incomplete record
        chomp $date;                # original left the newline embedded
        $date .= " EDT";            # eew, hardcoded timezones
        last if too_old($date);
        <S>;                        # url line, unused
        $ss .= "$title -- ";
        if (defined $max) {
            $count++;
            last if $count == $max;
        }
    }
    close S;
    return $ss;
}
|