package MojoMojo::Controller::Export;
use strict;
use warnings;
use parent 'Catalyst::Controller';

use Archive::Zip;
use DateTime;
use Encode ();
use IO::Scalar;
=head1 NAME

MojoMojo::Controller::Export - Export / Import related controller

=head1 SYNOPSIS
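
    # Both actions are Global; assuming the URL convention documented
    # for export_html below, they are reached as:
    #
    #   /.export_raw    - zip of raw wiki markup
    #   /.export_html   - zip of rendered HTML
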
=head1 DESCRIPTION

MojoMojo has an extensive export system. You can download all the
nodes of the wiki either as preformatted HTML for offline reading,
or in a raw format suitable for reimporting into another MojoMojo
installation. Either way, MojoMojo will create and send you a zip
file with a directory containing all the files. The name of the
directory will contain a timestamp showing when the archive was made.
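
For example, a raw export of the C</help> subtree of a wiki named
C<MyWiki> (names illustrative) might unpack to a layout like:

    MyWiki-help-markup-2011-05-04T12-30-00/
    MyWiki-help-markup-2011-05-04T12-30-00/help/index
    MyWiki-help-markup-2011-05-04T12-30-00/help/faq/index
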
=head1 ACTIONS

=head2 generate_export_filename

Create the filename under which we export pages.
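
Assuming a wiki named C<MyWiki> and an export of the C</help/faq> page
as HTML on 2011-05-04 at 12:30:00 (all values illustrative), the
generated name would be:

    MyWiki-help_faq-html-2011-05-04T12-30-00
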
=cut
sub generate_export_filename {
    my ( $c, $export_type ) = @_;
    my $now = DateTime->now();
    my $prefix = sprintf( "%s-%s-%s-%s",
        $c->fixw( $c->pref('name') ),
        # skip the leading slash; any remaining slashes in the path are
        # turned into underscores below
        substr( $c->stash->{page}->path, 1 ),
        $export_type,
        $now->ymd . 'T' . $now->hms('-'),
    );
    $prefix =~ s|/|_|g;
    return $prefix;
}
=head2 export_raw

This action will give you a ZIP file containing the raw wiki source
for all the nodes of the wiki.
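
Each page body is stored verbatim under its wiki path in a file named
C<index>, so C</help/faq> ends up as C<< <prefix>/help/faq/index >>
inside the archive. The finished archive is cached under the generated
name.
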
=cut
sub export_raw : Global {
    my ( $self, $c ) = @_;

    if ( !$c->user_exists() ) {
        $c->stash->{message} = $c->loc('To export, you must be logged in.');
        $c->detach( 'MojoMojo::Controller::PageAdmin', 'unauthorized' );
    }

    my $prefix = generate_export_filename( $c, 'markup' );

    # Serve a cached archive if we have one; otherwise build it.
    unless ( $c->res->{body} = $c->cache->get($prefix) ) {
        my @pages   = $c->stash->{page}->descendants;
        my $archive = Archive::Zip->new();
        $archive->addDirectory("$prefix/");
        foreach my $page (@pages) {
            next if not $page->content;

            # XXX - see notes in export_html about encode_utf8
            $archive->addString(
                Encode::encode_utf8( $page->content->body ),
                $prefix . $page->path . ( $page->path eq '/' ? '' : '/' ) . 'index'
            );
        }

        # Write the zip into the response body via an in-memory filehandle.
        my $fh = IO::Scalar->new( \$c->res->{body} );
        $archive->writeToFileHandle($fh);
        $c->cache->set( $prefix, $c->res->body );
    }
    $c->res->headers->header( 'Content-Type' => 'application/zip' );
    $c->res->headers->header(
        'Content-Disposition' => "attachment; filename=\"$prefix.zip\"" );
}
=head2 export_html (/.export_html)

This action will give you a ZIP file containing HTML-formatted
versions of all the nodes of the wiki.
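
Each page is rendered by an internal subrequest to the C</print> view
and stored as C<< <prefix>/<page path>/index.html >> inside the archive.
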
=cut
sub export_html : Global {
    my ( $self, $c ) = @_;

    if ( !$c->user_exists() ) {
        $c->stash->{message} = $c->loc('To export, you must be logged in.');
        $c->detach( 'MojoMojo::Controller::PageAdmin', 'unauthorized' );
    }

    my $prefix = generate_export_filename( $c, 'html' );

    # Serve a cached archive if we have one; otherwise build it.
    unless ( $c->res->{body} = $c->cache->get($prefix) ) {
        my @pages   = $c->stash->{page}->descendants;
        my $archive = Archive::Zip->new();
        $archive->addDirectory("$prefix/");
        foreach my $page (@pages) {
            $c->log->debug( 'Rendering ' . $page->path );

            # XXX - Note: subreq calls and gets unicode data from Catalyst
            # (because we're using Plugin::Unicode ;). However, it seems
            # like Compress::Zlib expects octets -- so we explicitly
            # encode them back to utf8 - lestrrat
            $archive->addString(
                Encode::encode_utf8(
                    $c->subreq( '/print', { path => $page->path } )
                ),
                $prefix
                    . $page->path
                    . ( $page->path eq '/' ? '' : '/' )
                    . 'index.html'
            );
        }

        # Write the zip into the response body via an in-memory filehandle.
        my $fh = IO::Scalar->new( \$c->res->{body} );
        $archive->writeToFileHandle($fh);
        $c->cache->set( $prefix, $c->res->body );
    }
    $c->res->headers->header( 'Content-Type' => 'application/zip' );
    $c->res->headers->header(
        'Content-Disposition' => "attachment; filename=\"$prefix.zip\"" );
}
=head1 AUTHOR

Marcus Ramberg <mramberg@cpan.org>

=head1 LICENSE

This library is free software. You can redistribute it and/or modify
it under the same terms as Perl itself.
=cut
1;