Ubuntu Pastebin

Paste from david at Fri, 29 May 2015 11:47:38 +0000

Download as text
 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
#! /usr/bin/python3

import os
import re
import subprocess
import tempfile

import markdown
import requests
from bs4 import BeautifulSoup

# Snappy series whose branch is checked out (lp:snappy/<SNAPPY_VERSION>).
SNAPPY_VERSION = "15.04"
# Maps a markdown filename in the branch's docs/ directory to the slug of
# its already-published page under developer.ubuntu.com/en/snappy/guides/.
# A value of None means the doc has no published page yet, so no link (or
# title lookup) is generated for it.
MAP = {"autopilot.md": None,
       "config.md": "config-command",
       "cross-build.md": None,
       "frameworks.md": "frameworks",
       "garbage.md": None,
       "hashes.md": None,
       "meta.md": "package-metadata",
       "oem.md": "oem",
       "package-names.md": None,
       "security.md": "security-policy"}


def get_snappy_docs():
    """Check out the snappy branch into a temp dir and list its docs.

    Returns a list of full paths to every file in the branch's docs/
    directory.

    Raises subprocess.CalledProcessError if the bzr checkout fails.
    """
    snappy_dir = tempfile.mkdtemp()
    snappy_docs_path = os.path.join(snappy_dir, "docs")
    # Argument list + check=True instead of os.system: the temp-dir path
    # is never re-parsed by a shell, and a failed checkout raises instead
    # of being silently ignored (the old code would then crash obscurely
    # in os.listdir).
    subprocess.run(
        ["bzr", "checkout", "--lightweight",
         "lp:snappy/%s" % SNAPPY_VERSION, snappy_dir],
        check=True)
    return [os.path.join(snappy_docs_path, name)
            for name in os.listdir(snappy_docs_path)]


def md_to_html(doc):
    """Render a markdown file as an HTML fragment in the
    developer.ubuntu.com grid markup.

    Returns the HTML string, or None when *doc* is not a .md file
    (callers must handle the None case).
    """
    # Guard clause makes the implicit None return explicit.
    if not doc.endswith(".md"):
        return None
    # Context manager fixes the leaked file handle of the original
    # open(...).read() one-liner.
    with open(doc, 'r') as f:
        text = f.read()
    html = markdown.markdown(text, output_format="html5")
    begin = "<div class=\"row no-border\">\n<div class=\"eight-col\">\n"
    end = "</div>\n</div>"
    html = begin + html + end
    # Code blocks get their own full-width column: close the eight-col
    # before each <pre><code> and reopen it afterwards.
    html = html.replace("<pre><code>",
                        "</div><div class=\"twelve-col\"><pre><code>")
    html = html.replace("</code></pre>",
                        "</code></pre></div><div class=\"eight-col\">")
    return html


def get_published_titles():
    """Fetch the page title of every doc already published on
    developer.ubuntu.com.

    Returns a dict mapping markdown filename -> published page title
    (the part of <title> before " | ").

    Raises requests.HTTPError if a guide page is unreachable.
    """
    titles = {}
    for filename, slug in MAP.items():
        if slug is None:
            # Doc not published yet; nothing to look up.
            continue
        url = "https://developer.ubuntu.com/en/snappy/guides/%s" % slug
        # Timeout so a stalled server cannot hang the whole script;
        # raise_for_status so a 404 error page is not scraped for a title.
        r = requests.get(url, timeout=30)
        r.raise_for_status()
        # Explicit parser: the bare BeautifulSoup(r.text) call emitted a
        # "no parser specified" warning and picked whatever was installed.
        soup = BeautifulSoup(r.text, "html.parser")
        titles[filename] = soup.title.string.split(" | ")[0]
    return titles


def replace_links(doc, titles):
    """Replace bare markdown filenames in *doc* with anchors to the
    corresponding published guides, then prettify and strip the
    <html>/<body> wrapper BeautifulSoup may add.

    doc    -- HTML string (coerced with str()).
    titles -- dict of markdown filename -> published page title, as
              returned by get_published_titles().
    """
    # Coerce once, outside the loop, instead of on every iteration.
    doc = str(doc)
    for filename, title in titles.items():
        url = "https://developer.ubuntu.com/en/snappy/guides/%s" % MAP[filename]
        link = "<a href=\"%s\">%s</a>" % (url, title)
        # NOTE(review): plain-text replace — this will also rewrite the
        # filename if it appears inside a code block; confirm acceptable.
        doc = doc.replace(filename, link)
    # Explicit parser avoids bs4's "no parser specified" warning.
    doc = BeautifulSoup(doc, "html.parser").prettify()
    # Drop the wrapper elements prettify() can introduce.  '/' needs no
    # escaping in a regex; the MULTILINE flag was a no-op (no anchors).
    doc = re.sub(r"<html>\n\s<body>\n", "", doc)
    doc = re.sub(r"\s</body>\n</html>", "", doc)
    return doc

def main():
    """Convert every snappy markdown doc to an .html fragment in the CWD."""
    titles = get_published_titles()
    for doc_path in get_snappy_docs():
        html = md_to_html(doc_path)
        if html is None:
            # Not a .md file — the original fell through here and wrote
            # the literal string "None" into an output file.
            continue
        html = replace_links(html, titles)
        out_name = os.path.basename(doc_path).replace('.md', '.html')
        # Context manager closes the output file even if write() fails.
        with open(out_name, "w") as out:
            out.write(html)


if __name__ == "__main__":
    main()
Download as text