summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rwxr-xr-xgemfeed.py29
1 file changed, 16 insertions, 13 deletions
diff --git a/gemfeed.py b/gemfeed.py
index 93d5886..a3cfb25 100755
--- a/gemfeed.py
+++ b/gemfeed.py
@@ -7,6 +7,7 @@ import os.path
import re
import stat
import urllib.parse
+from fnmatch import fnmatch
from feedgen.feed import FeedGenerator
@@ -55,16 +56,17 @@ def find_files(directory, time_func, n=10):
Return the n most recently created world readable files with extensions of
.gmi or .gemini, as a list sorted from most to least recent.
"""
- files = []
- for extension in ("gmi", "gemini"):
- glob_pattern = os.path.join(directory, "*.{}".format(extension))
- files.extend(glob.glob(glob_pattern))
- index = os.path.join(directory, "index.{}".format(extension))
- if index in files:
- files.remove(index)
- files = [f for f in files if is_world_readable(f)]
- files.sort(key=time_func, reverse=True)
- return files[0:n]
+ gemini_files = []
+ for root, dirs, files in os.walk(directory):
+ path = root.split(os.sep)
+ for f in files:
+ full_path = os.path.join(root, f)
+ for extension in ("gmi", "gemini"):
+ if not fnmatch(f, 'index.{}'.format(extension)) and \
+ fnmatch(f, '*.{}'.format(extension)) and \
+ is_world_readable(full_path):
+ gemini_files.append(full_path)
+ return sorted(gemini_files, key=time_func, reverse=True)[0:n]
def urljoin(base, url):
"""
@@ -85,13 +87,14 @@ def urljoin(base, url):
joined = joined._replace(scheme="gemini")
return urllib.parse.urlunsplit(joined)
-def populate_entry_from_file(filename, base_url, entry, time_func):
+def populate_entry_from_file(directory, filename, base_url, entry, time_func):
"""
Set the id, title, updated and link attributes of the provided
FeedGenerator entry object according the contents of the named
Gemini file and the base URL.
"""
- url = urljoin(base_url, os.path.basename(filename))
+ relative_filename = os.path.relpath(filename, start=directory)
+ url = urljoin(base_url, relative_filename)
entry.guid(url)
entry.link(href=url, rel="alternate")
updated = get_update_time(filename, time_func)
@@ -157,7 +160,7 @@ def build_feed(directory, time_func, base_url, output="atom.xml", n=10,
return
for n, filename in enumerate(files):
entry = feed.add_entry()
- populate_entry_from_file(filename, base_url, entry, time_func)
+ populate_entry_from_file(directory, filename, base_url, entry, time_func)
if n == 0:
feed.updated(entry.updated())
if verbose: