honk
commit ce7a65e70d (parent c4ca25055d)
@@ -3,15 +3,61 @@ package App::BlogAlba;
use strict;
use warnings;

use 5.010;

use App::BlogAlba::Publisher;

use Data::Paginated;

use Dancer2;
use Dancer2::Plugin::Feed;
use Dancer2::Plugin::Paginator;

our $VERSION = '0.95';

my $postsdir = Dancer2::FileUtils::path(
	setting('appdir'), 'published', 'posts'
);
die "Posts directory $postsdir does not exist!" unless -e -d $postsdir;
my $pagesdir = Dancer2::FileUtils::path(
	setting('appdir'), 'published', 'pages'
);
$pagesdir = undef unless -e -d $pagesdir;

my (@posts,@pages);
my ($cachetime,$cachedelta);

# Placeholder: currently accepts any parameters unconditionally.
sub ValidParams {
	return 1;
}
# Placeholder: caching is not implemented yet.
sub Cache {
	#
}
# Placeholder: initialisation is not implemented yet.
sub Initialise {
	#
}

# The route handlers below are empty stubs for now.
get '/' => sub {
};

get '/page/:num' => sub {
};

get '/:page' => sub {
};

get '/feed/:type' => sub {
	pass unless params->{type} =~ /^(rss|atom)$/i;
	# NB: @entries is not declared anywhere in this file yet.
	create_feed
		format => lc params->{type},
		title => config->{blogtitle},
		link => request->base,
		entries => \@entries;
};

get '/wrote/:year/:month/:slug' => sub {
	pass unless ValidParams scalar params 'route';

};

1;
__END__

@@ -3,6 +3,8 @@ package App::BlogAlba::Publisher;
use strict;
use warnings;

use Carp;

use Text::Markdown::Hoedown;
use YAML;

@@ -11,14 +13,47 @@ use Date::Parse qw/str2time/;

use Unicode::Normalize;

=head1 NAME

App::BlogAlba::Publisher

=head1 VERSION

Version 0.1

=cut

our $VERSION = '0.1';

use Exporter::Easy (
	EXPORT => [ qw// ],
-	OK => [ qw// ],
+	OK => [ qw/ParseArticle ParseArticles/ ],
);

=head1 SYNOPSIS

Allows a fairly genericised "load files from a given path with a given regexp and turn them into a data structure" setup.

    use App::BlogAlba::Publisher;
    my @articles = ParseArticles('/your/path','^.*\.md$');

=head1 TERMINOLOGY

=head2 Article

In the context of this module, an "article" is any valid markdown document which contains a YAML preamble, of the format:

    ---
    YAML
    ---

    MARKDOWN

=cut

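# Illustrative only: a concrete article in the format described above. The
# preamble fields (title, date) are hypothetical examples rather than keys this
# module requires, and the regexp split is just one way the ----delimited YAML
# preamble could be separated from the markdown body.
my $example_article = <<'EOF';
---
title: An example post
date: 2014-01-01 12:00:00
---

Some **Markdown** body text.
EOF
my ($example_yaml, $example_body) = $example_article =~ /\A---\n(.*?)\n---\n(.*)\z/s;
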
sub FindArticles {
	my ($path,$regexp) = @_;
-	opendir ARTICLES, $path or return undef;
+	opendir ARTICLES, $path or carp "Couldn't open path '$path'!" and return undef;
	my @articles;
	while(readdir ARTICLES) {
		push @articles, "$path/$_" if /$regexp/;
@@ -26,8 +61,34 @@ sub FindArticles {
	closedir ARTICLES;
	return @articles;
}

=head1 SUBROUTINES

=head2 ParseArticle

Parses a single file and returns a hashref of the resulting data structure.
Requires one argument: the path to a valid article.

    my $article = ParseArticle('/path/to/your/article.md');

=cut

sub ParseArticle {
	my $file = shift;
	# Placeholder: parsing is not implemented yet, so an empty hashref is returned.
	return {};
}

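# A minimal sketch of how ParseArticle() could eventually work, assuming the
# ----delimited preamble format described under TERMINOLOGY. The sub name, the
# 'body_html' key, and the error strings are illustrative assumptions, not part
# of this module's API.
sub ParseArticle_sketch {
	my $file = shift;
	open my $fh, '<', $file or carp "Couldn't open article '$file'!" and return undef;
	my $raw = do { local $/; <$fh> };
	close $fh;
	# Split the YAML preamble from the markdown body.
	my ($yaml, $markdown) = $raw =~ /\A---\n(.*?)\n---\n(.*)\z/s
		or carp "Article '$file' has no YAML preamble!" and return undef;
	my $article = Load($yaml);                    # preamble fields become hash keys
	$article->{body_html} = markdown($markdown);  # rendered via Text::Markdown::Hoedown
	return $article;
}
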
=head2 ParseArticles

A wrapper around ParseArticle() for parsing all articles in a directory. Uses FindArticles() internally to retrieve the list of articles within the directory.
Requires two arguments: the path to a directory containing articles, and a regular expression against which to compare filenames.

=cut

sub ParseArticles {

	my @articles = FindArticles @_;
	my @parsed_articles;
	push @parsed_articles, ParseArticle $_ for @articles;
	# Without an explicit return the caller would not get the parsed list back.
	return @parsed_articles;
}

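# Illustrative usage only: one way a caller could order the parsed articles
# newest-first, assuming each preamble carries a 'date' field (a hypothetical
# key) and using str2time() from Date::Parse, which is already imported above.
my @newest_first = sort { str2time($b->{date}) <=> str2time($a->{date}) }
                   grep { defined $_->{date} }
                   ParseArticles('/your/path', '^.*\.md$');
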

1;