author     Étienne Mollier <emollier@debian.org>   2024-02-13 21:04:02 +0100
committer  Étienne Mollier <emollier@debian.org>   2024-02-13 21:04:02 +0100
commit     5255b394867b6c899ee690e166f1b37a5b335433 (patch)
tree       a947280f4374994183ba7f30cc53bedd3c5f1b75
parent     ee648a8f6a8a49c073f70745be311f5ee5056efc (diff)
parent     59e6fdfbd023802c91bfb1ea262b209e48abd9e6 (diff)
Update upstream source from tag 'upstream/2.2'
Update to upstream version '2.2' with Debian dir 59d2beafd0d34f42a2ab111badb89e8663eb0376
-rw-r--r--   CHANGELOG        18
-rwxr-xr-x   ansicat.py      120
-rw-r--r--   man/ansicat.1     2
-rw-r--r--   man/netcache.1   11
-rw-r--r--   man/opnk.1        4
-rwxr-xr-x   netcache.py      15
-rwxr-xr-x   offpunk.py      100
-rw-r--r--   offutils.py       8
-rwxr-xr-x   opnk.py          26
9 files changed, 217 insertions, 87 deletions
diff --git a/CHANGELOG b/CHANGELOG
index c5e190f..0d4b8f2 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,5 +1,23 @@
# Offpunk History
+
+## 2.2 - February 13th 2024
+- cache folder is now configurable through the $OFFPUNK_CACHE_PATH environment variable (by prx)
+- offpunk: adding a URL to a list now updates the view mode if the URL is already present
+- netcache: solve an infinite gemini loop with code 6X (see also bug #31)
+- ansicat: added support for <video> HTML-element
+- ansicat: if chafa fails to load an image, fall back to timg if available
+- offpunk: add list autocompletion to "tour"
+- offpunk: removed "blackbox", which was neither used nor maintained
+- offpunk: "gus" was broken; it is functional again
+- opnk/offpunk: more informative prompt in less
+- ansicat: added support for HTML description elements <dt> and <dd> (by Bert Livens)
+- opnk: added "--mode" command-line argument (bug #39)
+- offpunk: support for "preformatted" theming (bug #38)
+- opnk/netcache: added "--cache-validity" command-line argument (bug #37)
+- ansicat: consider files as XML, not SVG, if they don’t have a .svg extension
+- offpunk: fix "view link" crashing with link to empty files
+
## 2.1 - December 15th 2023
- freshly updated gemtext/rss links are highlighted ("new_link" theme option)
- offpunk : new "copy title" and "copy link" function
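
The "fall back to timg" entry above is implemented in the ansicat.py hunks below by keeping an ordered list of candidate render commands and trying each one until one succeeds. A minimal standalone sketch of that pattern, assuming chafa and/or timg are installed; the command strings mirror the patch, while the subprocess-based helper and the sample file name are only illustrative:

    import subprocess

    def render_inline_image(img_file, width=80):
        # Candidates are tried in order: chafa first, timg as a fallback.
        candidates = [
            "chafa --bg white -t 1 -s %s -f symbols --animate=off",
            "timg --frames=1 -p q -g %sx1000",
        ]
        for template in candidates:
            cmd = template % width + " " + img_file
            try:
                result = subprocess.run(cmd, shell=True, check=True,
                                        capture_output=True, text=True)
                return result.stdout        # first renderer that works wins
            except Exception:
                continue                    # try the next candidate
        return "***IMAGE ERROR***\n"        # every renderer failed

    # print(render_inline_image("photo.jpg"))  # requires chafa or timg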
diff --git a/ansicat.py b/ansicat.py
index fe0e102..af33bb5 100755
--- a/ansicat.py
+++ b/ansicat.py
@@ -106,7 +106,8 @@ def inline_image(img_file,width):
if not os.path.exists(img_file):
return ""
#Chafa is faster than timg inline. Let use that one by default
- inline = None
+ #But we keep a list of "inlines" in case chafa fails
+ inlines = []
ansi_img = ""
#We avoid errors by not trying to render non-image files
if shutil.which("file"):
@@ -120,32 +121,39 @@ def inline_image(img_file,width):
if hasattr(img_obj,"n_frames") and img_obj.n_frames > 1:
# we remove all frames but the first one
img_obj.save(img_file,format="gif",save_all=False)
- inline = "chafa --bg white -s %s -f symbols"
+ inlines.append("chafa --bg white -s %s -f symbols")
elif _NEW_CHAFA:
- inline = "chafa --bg white -t 1 -s %s -f symbols --animate=off"
- if not inline and _NEW_TIMG:
- inline = "timg --frames=1 -p q -g %sx1000"
- if inline:
- cmd = inline%width + " %s"
+ inlines.append("chafa --bg white -t 1 -s %s -f symbols --animate=off")
+ if _NEW_TIMG:
+ inlines.append("timg --frames=1 -p q -g %sx1000")
+ image_success = False
+ while not image_success and len(inlines)>0:
+ cmd = inlines.pop(0)%width + " %s"
try:
ansi_img = run(cmd, parameter=img_file)
+ image_success = True
except Exception as err:
- ansi_img = "***image failed : %s***\n" %err
+ ansi_img = "***IMAGE ERROR***\n%s…\n…%s" %(str(err)[:50],str(err)[-50:])
return ansi_img
def terminal_image(img_file):
#Render by timg is better than old chafa.
# it is also centered
- cmd = None
+ cmds = []
if _NEW_CHAFA:
- cmd = "chafa -C on -d 0 --bg white -t 1 -w 1"
- elif _NEW_TIMG:
- cmd = "timg --loops=1 -C"
+ cmds.append("chafa -C on -d 0 --bg white -t 1 -w 1")
elif _HAS_CHAFA:
- cmd = "chafa -d 0 --bg white -t 1 -w 1"
- if cmd:
- cmd = cmd + " %s"
- run(cmd, parameter=img_file, direct_output=True)
+ cmds.append("chafa -d 0 --bg white -t 1 -w 1")
+ if _NEW_TIMG:
+ cmds.append("timg --loops=1 -C")
+ image_success = False
+ while not image_success and len(cmds) > 0:
+ cmd = cmds.pop(0) + " %s"
+ try:
+ run(cmd, parameter=img_file, direct_output=True)
+ image_success = True
+ except Exception as err:
+ print(err)
# First, we define the different content->text renderers, outside of the rest
@@ -357,15 +365,28 @@ class AbstractRenderer():
# Beware, blocks are not wrapped nor indented and left untouched!
# They are mostly useful for pictures and preformatted text.
- def add_block(self,intext):
+ def add_block(self,intext,theme=None):
# If necessary, we add the title before a block
self._title_first()
# we don’t want to indent blocks
self._endline()
self._disable_indents()
- self.final_text += self.current_indent + intext
- self.new_paragraph = False
- self._endline()
+ #we have to apply the theme for every line in the intext
+ #applying theme to preformatted is controversial as it could change it
+ if theme:
+ block = ""
+ lines = intext.split("\n")
+ for l in lines:
+ self.open_theme(theme)
+ self.last_line += self.current_indent + l
+ self.close_theme(theme)
+ self._endline()
+ self.last_line += "\n"
+ #one thing is sure : we need to keep unthemed blocks for images!
+ else:
+ self.final_text += self.current_indent + intext
+ self.new_paragraph = False
+ self._endline()
self._enable_indents()
def add_text(self,intext):
@@ -633,7 +654,7 @@ class GemtextRenderer(AbstractRenderer):
r.close_theme("preformatted")
elif preformatted:
# infinite line to not wrap preformated
- r.add_block(line+"\n")
+ r.add_block(line+"\n",theme="preformatted")
elif len(line.strip()) == 0:
r.newparagraph(force=True)
elif line.startswith("=>"):
@@ -1077,7 +1098,7 @@ class HtmlRenderer(AbstractRenderer):
toreturn = " " + toreturn
return toreturn
def recursive_render(element,indent="",preformatted=False):
- if element.name == "blockquote":
+ if element.name in ["blockquote", "dd"]:
r.newparagraph()
r.startindent(" ",reverse=" ")
for child in element.children:
@@ -1085,7 +1106,7 @@ class HtmlRenderer(AbstractRenderer):
recursive_render(child,indent="\t")
r.close_theme("blockquote")
r.endindent()
- elif element.name in ["div","p"]:
+ elif element.name in ["div","p","dt"]:
r.newparagraph()
for child in element.children:
recursive_render(child,indent=indent)
@@ -1114,8 +1135,8 @@ class HtmlRenderer(AbstractRenderer):
recursive_render(child,indent=indent,preformatted=True)
elif element.name in ["pre"]:
r.newparagraph()
- r.add_block(element.text)
- r.newparagraph()
+ r.add_block(element.text,theme="preformatted")
+ r.newparagraph(force=True)
elif element.name in ["li"]:
r.startindent(" • ",sub=" ")
for child in element.children:
@@ -1196,6 +1217,52 @@ class HtmlRenderer(AbstractRenderer):
r.add_text(text + link_id)
r.close_theme("image_link")
r.newline()
+
+ elif element.name == "video":
+ poster = element.get("poster")
+ src = element.get("src")
+ for child in element.children:
+ if not src:
+ if child.name == "source":
+ src = child.get("src")
+ text = ""
+ if poster:
+ ansi_img = render_image(poster,width=width,mode=mode)
+ alt = element.get("alt")
+ if alt:
+ alt = sanitize_string(alt)
+ text += "[VIDEO] %s"%alt
+ else:
+ text += "[VIDEO]"
+
+ if poster:
+ if not mode in self.images:
+ self.images[mode] = []
+ poster_url,d = looks_like_base64(poster,self.url)
+ if poster_url:
+ vid_url,d2 = looks_like_base64(src,self.url)
+ self.images[mode].append(poster_url)
+ r.add_block(ansi_img)
+ r.open_theme("image_link")
+ r.center_line()
+ if vid_url and src:
+ links.append(vid_url+" "+text)
+ link_id = " [%s]"%(len(links)+startlinks)
+ r.add_text(text + link_id)
+ else:
+ r.add_text(text)
+ r.close_theme("image_link")
+ r.newline()
+ elif src:
+ vid_url,d = looks_like_base64(src,self.url)
+ links.append(vid_url+" "+text)
+ link_id = " [%s]"%(len(links)+startlinks)
+ r.open_theme("image_link")
+ r.center_line()
+ r.add_text(text + link_id)
+ r.close_theme("image_link")
+ r.newline()
+
elif element.name == "br":
r.newline()
elif element.name not in ["script","style","template"] and type(element) != Comment:
@@ -1288,6 +1355,9 @@ def get_mime(path,url=None):
# If it’s a xml file, consider it as such, regardless of what file thinks
elif path.endswith(".xml"):
mime = "text/xml"
+ # If it doesn’t end with .svg, it is probably an xml, not a SVG file
+ elif "svg" in mime and not path.endswith(".svg"):
+ mime = "text/xml"
#Some xml/html document are considered as octet-stream
if mime == "application/octet-stream":
mime = "text/xml"
diff --git a/man/ansicat.1 b/man/ansicat.1
index 14d71a4..e1161d0 100644
--- a/man/ansicat.1
+++ b/man/ansicat.1
@@ -54,6 +54,8 @@ either thanks to the MIME type,
or from the file being rendered itself.
.It Fl \-mime Ar MIME
MIME type of the content to parse.
+.It Fl \-mode Ar MODE
+Rendering mode to use: choose between normal (default), full or source.
.It Fl \-url Ar URL ...
original URL of the content.
.El
diff --git a/man/netcache.1 b/man/netcache.1
index d2aab99..d7e3320 100644
--- a/man/netcache.1
+++ b/man/netcache.1
@@ -27,6 +27,15 @@ otherwise it would always refresh it from the version available online.
It is also useful for mapping a given URL to its location in the cache,
independently of whether it has been downloaded first.
.Pp
+The default cache path is
+.Pa ~/.cache/offpunk .
+Set the
+.Ev OFFPUNK_CACHE_PATH
+environment variable to use another location.
+.Bd -literal
+OFFPUNK_CACHE_PATH=/home/ploum/custom-cache netcache.py gemini://some.url
+.Ed
+.Pp
.Xr Offpunk 1
is a command-line browser and feed reader dedicated to browsing the Web,
Gemini, Gopher and Spartan.
@@ -47,6 +56,8 @@ The value is expressed in megabytes.
.It Fl \-timeout Ar TIMEOUT
time to wait before cancelling connection.
The value is expressed in seconds.
+.It Fl \-cache-validity Ar CACHE_VALIDITY
+Maximum age (in seconds) of the cached version before redownloading a new version.
.El
.
.Sh EXIT STATUS
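
The --cache-validity option documented above amounts to a freshness test on the age of the cached file: if the copy on disk is older than the given number of seconds, netcache downloads it again. A rough sketch of that rule, assuming a plain file path; netcache's own fetch()/is_cache_valid() code is more involved:

    import os
    import time

    def cache_is_fresh(cache_path, validity_seconds):
        # Usable while the cached file exists and is younger than the
        # requested validity; otherwise the caller should re-download.
        if not os.path.exists(cache_path):
            return False
        age = time.time() - os.path.getmtime(cache_path)
        return age < validity_seconds

    # e.g. reuse a cached copy for up to ten minutes:
    # if not cache_is_fresh(path, 600): ...download again...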
diff --git a/man/opnk.1 b/man/opnk.1
index 079e1e7..a859b83 100644
--- a/man/opnk.1
+++ b/man/opnk.1
@@ -37,6 +37,10 @@ path to the file or URL to open.
.Bl -tag -width Ds -offset indent
.It Fl h , \-help
Show a help message and exit
+.It Fl \-mode Ar MODE
+Rendering mode to use: choose between normal (default), full or source.
+.It Fl \-cache-validity Ar CACHE_VALIDITY
+Maximum age (in seconds) of the cached version before redownloading a new version.
.El
.
.Sh EXIT STATUS
diff --git a/netcache.py b/netcache.py
index e235a25..7465f85 100755
--- a/netcache.py
+++ b/netcache.py
@@ -731,9 +731,9 @@ def _fetch_gemini(url,timeout=DEFAULT_TIMEOUT,interactive=True,accept_bad_ssl_ce
raise RuntimeError(meta)
# Client cert
elif status.startswith("6"):
- print("Handling certificates for status 6X are not supported by offpunk\n")
- print("See bug #31 for discussion about the problem")
- _fetch_gemini(url)
+ error = "Handling certificates for status 6X is not supported by offpunk\n"
+ error += "See bug #31 for discussion about the problem"
+ raise RuntimeError(error)
# Invalid status
elif not status.startswith("2"):
raise RuntimeError("Server returned undefined status code %s!" % status)
@@ -878,11 +878,12 @@ def main():
help="Cancel download of items above that size (value in Mb).")
parser.add_argument("--timeout", type=int,
help="Time to wait before cancelling connection (in second).")
+ parser.add_argument("--cache-validity",type=int, default=0,
+ help="maximum age, in seconds, of the cached version before \
+ redownloading a new version")
# No argument: write help
parser.add_argument('url', metavar='URL', nargs='*',
help='download URL and returns the content or the path to a cached version')
- # arg = URL: download and returns cached URI
- # --cache-validity : do not download if cache is valid
# --validity : returns the date of the cached version, Null if no version
# --force-download : download and replace cache, even if valid
args = parser.parse_args()
@@ -893,8 +894,8 @@ def main():
if args.offline:
path = get_cache_path(u)
else:
- print("Download URL: %s" %u)
- path,url = fetch(u,max_size=args.max_size,timeout=args.timeout)
+ path,url = fetch(u,max_size=args.max_size,timeout=args.timeout,\
+ validity=args.cache_validity)
if args.path:
print(path)
else:
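
For scripting, the new command-line option maps directly onto the validity keyword of fetch(), as the hunk above shows. A minimal sketch of calling it from Python, assuming netcache.py is importable; the URL is only an example and a network connection (or an existing cached copy) is assumed:

    import netcache

    # Accept a cached copy up to one hour old instead of re-downloading.
    path, url = netcache.fetch("gemini://example.org/", validity=3600)
    print(path)   # filesystem path of the (possibly cached) content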
diff --git a/offpunk.py b/offpunk.py
index 10f7398..5514aee 100755
--- a/offpunk.py
+++ b/offpunk.py
@@ -4,7 +4,7 @@
Offline-First Gemini/Web/Gopher/RSS reader and browser
"""
-__version__ = "2.1"
+__version__ = "2.2"
## Initial imports and conditional imports {{{
import argparse
@@ -206,6 +206,8 @@ class GeminiClient(cmd.Cmd):
return [i+" " for i in allowed if i.startswith(text)]
def complete_move(self,text,line,begidx,endidx):
return self.complete_add(text,line,begidx,endidx)
+ def complete_tour(self,text,line,begidx,endidx):
+ return self.complete_add(text,line,begidx,endidx)
def complete_theme(self,text,line,begidx,endidx):
elements = offthemes.default
@@ -299,7 +301,7 @@ class GeminiClient(cmd.Cmd):
else:
self.page_index = 0
# Update state (external files are not added to history)
- self.current_url = url
+ self.current_url = modedurl
if update_hist and not self.sync_only:
self._update_history(modedurl)
else:
@@ -920,7 +922,8 @@ Use 'ls -l' to see URLs."""
if not line:
print("What?")
return
- self._go_to_url(urllib.parse.urlunparse("gemini","geminispace.info","/search","",line,""))
+ search = line.replace(" ","%20")
+ self._go_to_url("gemini://geminispace.info/search?%s"%search)
def do_history(self, *args):
"""Display history."""
@@ -988,8 +991,11 @@ Use "view XX" where XX is a number to view information about link XX.
if netcache.is_cache_valid(link_url):
last_modified = netcache.cache_last_modified(link_url)
link_renderer = self.get_renderer(link_url)
- link_title = link_renderer.get_page_title()
- print(link_title)
+ if link_renderer:
+ link_title = link_renderer.get_page_title()
+ print(link_title)
+ else:
+ print("Empty cached version")
print("Last cached on %s"%time.ctime(last_modified))
else:
print("No cached version for this link")
@@ -1215,8 +1221,6 @@ archives, which is a special historical list limited in size. It is similar to `
url = self.current_url
r = self.get_renderer(url)
if r:
- mode = r.get_mode()
- url = mode_url(url,mode)
title = r.get_page_title()
else:
title = ""
@@ -1233,23 +1237,26 @@ archives, which is a special historical list limited in size. It is similar to `
return False
else:
if not url:
- url,mode = unmode_url(self.current_url)
+ url = self.current_url
+ unmoded_url,mode = unmode_url(url)
# first we check if url already exists in the file
- with open(list_path,"r") as l_file:
- lines = l_file.readlines()
- l_file.close()
- for l in lines:
- sp = l.split()
- if url in sp:
- if verbose:
- print("%s already in %s."%(url,list))
- return False
- with open(list_path,"a") as l_file:
- l_file.write(self.to_map_line(url))
- l_file.close()
- if verbose:
- print("%s added to %s" %(url,list))
- return True
+ if self.list_has_url(url,list,exact_mode=True):
+ if verbose:
+ print("%s already in %s."%(url,list))
+ return False
+ # If the URL already exists but without a mode, we update the mode
+ # FIXME: this doesn’t take into account the case where you want to remove the mode
+ elif url != unmoded_url and self.list_has_url(unmoded_url,list):
+ self.list_update_url_mode(unmoded_url,list,mode)
+ if verbose:
+ print("%s has updated mode in %s to %s"%(url,list,mode))
+ else:
+ with open(list_path,"a") as l_file:
+ l_file.write(self.to_map_line(url))
+ l_file.close()
+ if verbose:
+ print("%s added to %s" %(url,list))
+ return True
@needs_gi
def list_add_top(self,list,limit=0,truncate_lines=0):
@@ -1288,8 +1295,14 @@ archives, which is a special historical list limited in size. It is similar to `
def list_rm_url(self,url,list):
return self.list_has_url(url,list,deletion=True)
+ def list_update_url_mode(self,url,list,mode):
+ return self.list_has_url(url,list,update_mode = mode)
+
# deletion and has_url are so similar, I made them the same method
- def list_has_url(self,url,list,deletion=False):
+ # deletion : true or false if you want to delete the URL
+ # exact_mode : True if you want to check only for the exact url, not the canonical one
+ # update_mode : a new mode to update the URL
+ def list_has_url(self,url,list,deletion=False, exact_mode=False, update_mode = None):
list_path = self.list_path(list)
if list_path:
to_return = False
@@ -1298,7 +1311,8 @@ archives, which is a special historical list limited in size. It is similar to `
lf.close()
to_write = []
# let’s remove the mode
- url=unmode_url(url)[0]
+ if not exact_mode:
+ url=unmode_url(url)[0]
for l in lines:
# we separate components of the line
# to ensure we identify a complete URL, not a part of it
@@ -1306,15 +1320,27 @@ archives, which is a special historical list limited in size. It is similar to `
if url not in splitted and len(splitted) > 1:
current = unmode_url(splitted[1])[0]
#sometimes, we must remove the ending "/"
- if url == current:
- to_return = True
- elif url.endswith("/") and url[:-1] == current:
+ if url == current or (url.endswith("/") and url[:-1] == current):
to_return = True
+ if update_mode:
+ new_line = l.replace(current,mode_url(url,update_mode))
+ to_write.append(new_line)
+ elif not deletion:
+ to_write.append(l)
else:
to_write.append(l)
- else:
+ elif url in splitted:
to_return = True
- if deletion :
+ # We update the mode if asked by replacing the old url
+ # by a moded one in the same line
+ if update_mode:
+ new_line = l.replace(url,mode_url(url,update_mode))
+ to_write.append(new_line)
+ elif not deletion:
+ to_write.append(l)
+ else:
+ to_write.append(l)
+ if deletion or update_mode:
with open(list_path,"w") as lf:
for l in to_write:
lf.write(l)
@@ -1588,19 +1614,6 @@ The following lists cannot be removed or frozen but can be edited with "list edi
else:
cmd.Cmd.do_help(self, arg)
- ### Flight recorder
- def do_blackbox(self, *args):
- """Display contents of flight recorder, showing statistics for the
-current gemini browsing session."""
- lines = []
- # Compute flight time
- now = time.time()
- delta = now - self.log["start_time"]
- hours, remainder = divmod(delta, 3600)
- minutes, seconds = divmod(remainder, 60)
- # Assemble lines
- lines.append(("Patrol duration", "%02d:%02d:%02d" % (hours, minutes, seconds)))
-
def do_sync(self, line):
"""Synchronize all bookmarks lists and URLs from the to_fetch list.
- New elements in pages in subscribed lists will be added to tour
@@ -1879,7 +1892,6 @@ def main():
gc.onecmd(line)
lists = None
gc.call_sync(refresh_time=refresh_time,depth=depth,lists=args.url)
- gc.onecmd("blackbox")
else:
# We are in the normal mode. First process config file
torun_queue = read_config(torun_queue,rcfile=args.config_file,interactive=True)
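
As a small worked example of the restored "gus" command above: the query is percent-encoded by hand and appended to the geminispace.info search endpoint before being passed to _go_to_url(), so a search for "offline browser" becomes:

    line = "offline browser"
    search = line.replace(" ", "%20")
    url = "gemini://geminispace.info/search?%s" % search
    # -> gemini://geminispace.info/search?offline%20browser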
diff --git a/offutils.py b/offutils.py
index ff34ef0..0d4244c 100644
--- a/offutils.py
+++ b/offutils.py
@@ -62,9 +62,15 @@ def xdg(folder="cache"):
#if no XDG .local/share and not XDG .config, we use the old config
if not os.path.exists(data_home) and os.path.exists(_old_config):
_DATA_DIR = _CONFIG_DIR
+ ## get _CACHE_PATH from OFFPUNK_CACHE_PATH environment variable
+ # if OFFPUNK_CACHE_PATH empty, set default to ~/.cache/offpunk
cache_home = os.environ.get('XDG_CACHE_HOME') or\
os.path.join(_home,'.cache')
- _CACHE_PATH = os.path.join(os.path.expanduser(cache_home),"offpunk/")
+ _CACHE_PATH = os.environ.get('OFFPUNK_CACHE_PATH', \
+ os.path.join(os.path.expanduser(cache_home),"offpunk/"))
+ #Check that the cache path ends with "/"
+ if not _CACHE_PATH.endswith("/"):
+ _CACHE_PATH += "/"
os.makedirs(_CACHE_PATH,exist_ok=True)
if folder == "cache" and not UPGRADED:
upgrade_cache(_CACHE_PATH)
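
Putting the offutils.py change above together: the cache location is resolved inside xdg("cache"), with $OFFPUNK_CACHE_PATH taking precedence over the XDG default and a trailing slash enforced. A condensed, standalone restatement of that resolution order (the custom path is the same illustrative one used in the netcache.1 example earlier):

    import os

    # e.g. launched as: OFFPUNK_CACHE_PATH=/home/ploum/custom-cache offpunk.py
    cache_home = os.environ.get('XDG_CACHE_HOME') or \
        os.path.join(os.path.expanduser("~"), '.cache')
    cache_path = os.environ.get('OFFPUNK_CACHE_PATH',
                                os.path.join(cache_home, "offpunk/"))
    if not cache_path.endswith("/"):
        cache_path += "/"
    print(cache_path)   # -> /home/ploum/custom-cache/ when the variable is set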
diff --git a/opnk.py b/opnk.py
index 1345ee1..615a5d2 100755
--- a/opnk.py
+++ b/opnk.py
@@ -48,16 +48,16 @@ else:
# -S : do not wrap long lines. Wrapping is done by offpunk, longlines
# are there on purpose (surch in asciiart)
#--incsearch : incremental search starting rev581
-if less_version >= 581:
- less_base = "less --incsearch --save-marks -~ -XRfMWiS"
-elif less_version >= 572:
- less_base = "less --save-marks -XRfMWiS"
-else:
- less_base = "less -XRfMWiS"
-_DEFAULT_LESS = less_base + " \"+''\" %s"
-_DEFAULT_CAT = less_base + " -EF %s"
-
def less_cmd(file, histfile=None,cat=False,grep=None):
+ less_prompt = "page %%d/%%D- lines %%lb/%%L - %%Pb\%%"
+ if less_version >= 581:
+ less_base = "less --incsearch --save-marks -~ -XRfWiS -P \"%s\""%less_prompt
+ elif less_version >= 572:
+ less_base = "less --save-marks -XRfMWiS"
+ else:
+ less_base = "less -XRfMWiS"
+ _DEFAULT_LESS = less_base + " \"+''\" %s"
+ _DEFAULT_CAT = less_base + " -EF %s"
if histfile:
env = {"LESSHISTFILE": histfile}
else:
@@ -280,12 +280,18 @@ def main():
opnk will fallback to opening the file with xdg-open. If given an URL as input \
instead of a path, opnk will rely on netcache to get the networked content."
parser = argparse.ArgumentParser(prog="opnk",description=descri)
+ parser.add_argument("--mode", metavar="MODE",
+ help="Which mode should be used to render: normal (default), full or source.\
+ With HTML, the normal mode tries to extract the article.")
parser.add_argument("content",metavar="INPUT", nargs="*",
default=sys.stdin, help="Path to the file or URL to open")
+ parser.add_argument("--cache-validity",type=int, default=0,
+ help="maximum age, in seconds, of the cached version before \
+ redownloading a new version")
args = parser.parse_args()
cache = opencache()
for f in args.content:
- cache.opnk(f)
+ cache.opnk(f,mode=args.mode,validity=args.cache_validity)
if __name__ == "__main__":
main()
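
The two new opnk flags end up as keyword arguments of opencache().opnk(), as the final hunk shows. A minimal sketch of driving it from Python instead of the command line, assuming opnk.py is importable; the URL and values are only examples:

    from opnk import opencache

    cache = opencache()
    # Render in "full" mode, reusing any cached copy younger than one hour.
    cache.opnk("gemini://example.org/", mode="full", validity=3600)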