Klaus Demo ~jonashaag/klaus / 5e3a0e4
Black format all the things Jonas Haag 13 days ago
23 changed file(s) with 794 addition(s) and 533 deletion(s). Raw diff Collapse all Expand all
1616 def git_repository(path):
1717 path = os.path.abspath(path)
1818 if not os.path.exists(path):
19 raise argparse.ArgumentTypeError('%r: No such directory' % path)
19 raise argparse.ArgumentTypeError("%r: No such directory" % path)
2020 try:
2121 Repo(path)
2222 except NotGitRepository:
23 raise argparse.ArgumentTypeError('%r: Not a Git repository' % path)
23 raise argparse.ArgumentTypeError("%r: Not a Git repository" % path)
2424 return path
2525
2626
2727 def make_parser():
2828 parser = argparse.ArgumentParser()
29 parser.add_argument('--host', help="default: 127.0.0.1", default='127.0.0.1')
30 parser.add_argument('--port', help="default: 8080", default=8080, type=int)
31 parser.add_argument('--site-name', help="site name showed in header. default: your hostname")
32 parser.add_argument('--version', help='print version number', action='store_true')
33 parser.add_argument('-b', '--browser', help="open klaus in a browser on server start",
34 default=False, action='store_true')
35 parser.add_argument('-B', '--with-browser', help="specify which browser to use with --browser",
36 metavar='BROWSER', default=None)
37 parser.add_argument('--ctags', help="enable ctags for which revisions? default: none. "
38 "WARNING: Don't use 'ALL' for public servers!",
39 choices=['none', 'tags-and-branches', 'ALL'], default='none')
29 parser.add_argument("--host", help="default: 127.0.0.1", default="127.0.0.1")
30 parser.add_argument("--port", help="default: 8080", default=8080, type=int)
31 parser.add_argument(
32 "--site-name", help="site name showed in header. default: your hostname"
33 )
34 parser.add_argument("--version", help="print version number", action="store_true")
35 parser.add_argument(
36 "-b",
37 "--browser",
38 help="open klaus in a browser on server start",
39 default=False,
40 action="store_true",
41 )
42 parser.add_argument(
43 "-B",
44 "--with-browser",
45 help="specify which browser to use with --browser",
46 metavar="BROWSER",
47 default=None,
48 )
49 parser.add_argument(
50 "--ctags",
51 help="enable ctags for which revisions? default: none. "
52 "WARNING: Don't use 'ALL' for public servers!",
53 choices=["none", "tags-and-branches", "ALL"],
54 default="none",
55 )
4056
41 parser.add_argument('repos', help='repositories to serve',
42 metavar='DIR', nargs='*', type=git_repository)
57 parser.add_argument(
58 "repos",
59 help="repositories to serve",
60 metavar="DIR",
61 nargs="*",
62 type=git_repository,
63 )
4364
4465 grp = parser.add_argument_group("Git Smart HTTP")
45 grp.add_argument('--smarthttp', help="enable Git Smart HTTP serving",
46 action='store_true')
47 grp.add_argument('--htdigest', help="use credentials from FILE",
48 metavar="FILE", type=argparse.FileType('r'))
66 grp.add_argument(
67 "--smarthttp", help="enable Git Smart HTTP serving", action="store_true"
68 )
69 grp.add_argument(
70 "--htdigest",
71 help="use credentials from FILE",
72 metavar="FILE",
73 type=argparse.FileType("r"),
74 )
4975
5076 grp = parser.add_argument_group("Development flags", "DO NOT USE IN PRODUCTION!")
51 grp.add_argument('--debug', help="Enable Werkzeug debugger and reloader", action='store_true')
77 grp.add_argument(
78 "--debug", help="Enable Werkzeug debugger and reloader", action="store_true"
79 )
5280
5381 return parser
5482
6189 return 0
6290
6391 if args.htdigest and not args.smarthttp:
64 print("ERROR: --htdigest option has no effect without --smarthttp enabled", file=sys.stderr)
92 print(
93 "ERROR: --htdigest option has no effect without --smarthttp enabled",
94 file=sys.stderr,
95 )
6596 return 1
6697
6798 if not args.repos:
68 print("WARNING: No repositories supplied -- syntax is 'klaus dir1 dir2...'.", file=sys.stderr)
99 print(
100 "WARNING: No repositories supplied -- syntax is 'klaus dir1 dir2...'.",
101 file=sys.stderr,
102 )
69103
70104 if not args.site_name:
71 args.site_name = '%s:%d' % (args.host, args.port)
105 args.site_name = "%s:%d" % (args.host, args.port)
72106
73 if args.ctags != 'none':
107 if args.ctags != "none":
74108 from klaus.ctagsutils import check_have_exuberant_ctags
109
75110 if not check_have_exuberant_ctags():
76 print("ERROR: Exuberant ctags not installed (or 'ctags' binary isn't *Exuberant* ctags)", file=sys.stderr)
111 print(
112 "ERROR: Exuberant ctags not installed (or 'ctags' binary isn't *Exuberant* ctags)",
113 file=sys.stderr,
114 )
77115 return 1
78116 try:
79117 import ctags
93131
94132 app.run(args.host, args.port, args.debug)
95133
134
96135 def _open_browser(args):
97136 # Open a web browser onto the server URL. Technically we're jumping the
98137 # gun a little here since the server is not yet running, but there's no
104143 opener = webbrowser.open
105144 else:
106145 opener = webbrowser.get(args.with_browser).open
107 opener('http://%s:%s' % (args.host, args.port))
146 opener("http://%s:%s" % (args.host, args.port))
108147
109148
110 if __name__ == '__main__':
149 if __name__ == "__main__":
111150 exit(main())
66 from klaus.repo import FancyRepo, InvalidRepo
77
88
9 KLAUS_VERSION = utils.guess_git_revision() or '1.5.2'
9 KLAUS_VERSION = utils.guess_git_revision() or "1.5.2"
1010
1111
1212 class Klaus(flask.Flask):
1313 jinja_options = {
14 'extensions': ['jinja2.ext.autoescape'],
15 'undefined': jinja2.StrictUndefined
14 "extensions": ["jinja2.ext.autoescape"],
15 "undefined": jinja2.StrictUndefined,
1616 }
1717
18 def __init__(self, repo_paths, site_name, use_smarthttp, ctags_policy='none'):
18 def __init__(self, repo_paths, site_name, use_smarthttp, ctags_policy="none"):
1919 """(See `make_app` for parameter descriptions.)"""
2020 self.site_name = site_name
2121 self.use_smarthttp = use_smarthttp
3333 """Called by Flask.__init__"""
3434 env = super(Klaus, self).create_jinja_environment()
3535 for func in [
36 'force_unicode',
37 'timesince',
38 'shorten_sha1',
39 'shorten_message',
40 'extract_author_name',
41 'formattimestamp',
36 "force_unicode",
37 "timesince",
38 "shorten_sha1",
39 "shorten_message",
40 "extract_author_name",
41 "formattimestamp",
4242 ]:
4343 env.filters[func] = getattr(utils, func)
4444
45 env.globals['KLAUS_VERSION'] = KLAUS_VERSION
46 env.globals['USE_SMARTHTTP'] = self.use_smarthttp
47 env.globals['SITE_NAME'] = self.site_name
45 env.globals["KLAUS_VERSION"] = KLAUS_VERSION
46 env.globals["USE_SMARTHTTP"] = self.use_smarthttp
47 env.globals["SITE_NAME"] = self.site_name
4848
4949 return env
5050
5151 def setup_routes(self):
52 # fmt: off
5253 for endpoint, rule in [
5354 ('repo_list', '/'),
5455 ('robots_txt', '/robots.txt/'),
7071 ('download', '/<repo>/tarball/<path:rev>/'),
7172 ]:
7273 self.add_url_rule(rule, view_func=getattr(views, endpoint))
74 # fmt: on
7375
7476 def should_use_ctags(self, git_repo, git_commit):
75 if self.ctags_policy == 'none':
77 if self.ctags_policy == "none":
7678 return False
77 elif self.ctags_policy == 'ALL':
79 elif self.ctags_policy == "ALL":
7880 return True
79 elif self.ctags_policy == 'tags-and-branches':
81 elif self.ctags_policy == "tags-and-branches":
8082 return git_commit.id in git_repo.get_tag_and_branch_shas()
8183 else:
8284 raise ValueError("Unknown ctags policy %r" % self.ctags_policy)
9294 return valid_repos, invalid_repos
9395
9496
95 def make_app(repo_paths, site_name, use_smarthttp=False, htdigest_file=None,
96 require_browser_auth=False, disable_push=False, unauthenticated_push=False,
97 ctags_policy='none'):
97 def make_app(
98 repo_paths,
99 site_name,
100 use_smarthttp=False,
101 htdigest_file=None,
102 require_browser_auth=False,
103 disable_push=False,
104 unauthenticated_push=False,
105 ctags_policy="none",
106 ):
98107 """
99108 Returns a WSGI app with all the features (smarthttp, authentication)
100109 already patched in.
124133 if disable_push:
125134 raise ValueError("'unauthenticated_push' set with 'disable_push'")
126135 if require_browser_auth:
127 raise ValueError("Incompatible options 'unauthenticated_push' and 'require_browser_auth'")
136 raise ValueError(
137 "Incompatible options 'unauthenticated_push' and 'require_browser_auth'"
138 )
128139 if htdigest_file and not (require_browser_auth or use_smarthttp):
129 raise ValueError("'htdigest_file' set without 'use_smarthttp' or 'require_browser_auth'")
140 raise ValueError(
141 "'htdigest_file' set without 'use_smarthttp' or 'require_browser_auth'"
142 )
130143
131144 app = Klaus(
132145 repo_paths,
139152 if use_smarthttp:
140153 # `path -> Repo` mapping for Dulwich's web support
141154 dulwich_backend = dulwich.server.DictBackend(
142 {'/'+name: repo for name, repo in app.valid_repos.items()}
155 {"/" + name: repo for name, repo in app.valid_repos.items()}
143156 )
144157 # Dulwich takes care of all Git related requests/URLs
145158 # and passes through everything else to klaus
163176 # Git will never call /<repo-name>/git-receive-pack if authentication
164177 # failed for /info/refs, but since it's used to upload stuff to the server
165178 # we must secure it anyway for security reasons.
166 PATTERN = r'^/[^/]+/(info/refs\?service=git-receive-pack|git-receive-pack)$'
179 PATTERN = r"^/[^/]+/(info/refs\?service=git-receive-pack|git-receive-pack)$"
167180 if unauthenticated_push:
168181 # DANGER ZONE: Don't require authentication for push'ing
169182 app.wsgi_app = dulwich_wrapped_app
192205
193206 if require_browser_auth:
194207 app.wsgi_app = httpauth.DigestFileHttpAuthMiddleware(
195 htdigest_file,
196 wsgi_app=app.wsgi_app
208 htdigest_file, wsgi_app=app.wsgi_app
197209 )
198210
199211 return app
22
33
44 def get_args_from_env():
5 repos = os.environ.get('KLAUS_REPOS', [])
5 repos = os.environ.get("KLAUS_REPOS", [])
66 if repos:
77 repos = repos.split()
8 args = (
9 repos,
10 os.environ.get('KLAUS_SITE_NAME', 'unnamed site')
11 )
8 args = (repos, os.environ.get("KLAUS_SITE_NAME", "unnamed site"))
129 kwargs = dict(
13 htdigest_file=os.environ.get('KLAUS_HTDIGEST_FILE'),
14 use_smarthttp=strtobool(os.environ.get('KLAUS_USE_SMARTHTTP', '0')),
10 htdigest_file=os.environ.get("KLAUS_HTDIGEST_FILE"),
11 use_smarthttp=strtobool(os.environ.get("KLAUS_USE_SMARTHTTP", "0")),
1512 require_browser_auth=strtobool(
16 os.environ.get('KLAUS_REQUIRE_BROWSER_AUTH', '0')),
17 disable_push=strtobool(os.environ.get('KLAUS_DISABLE_PUSH', '0')),
13 os.environ.get("KLAUS_REQUIRE_BROWSER_AUTH", "0")
14 ),
15 disable_push=strtobool(os.environ.get("KLAUS_DISABLE_PUSH", "0")),
1816 unauthenticated_push=strtobool(
19 os.environ.get('KLAUS_UNAUTHENTICATED_PUSH', '0')),
20 ctags_policy=os.environ.get('KLAUS_CTAGS_POLICY', 'none')
17 os.environ.get("KLAUS_UNAUTHENTICATED_PUSH", "0")
18 ),
19 ctags_policy=os.environ.get("KLAUS_CTAGS_POLICY", "none"),
2120 )
2221 return args, kwargs
22
33 args, kwargs = get_args_from_env()
44
5 if kwargs['htdigest_file']:
6 with open(kwargs['htdigest_file']) as file:
7 kwargs['htdigest_file'] = file
5 if kwargs["htdigest_file"]:
6 with open(kwargs["htdigest_file"]) as file:
7 kwargs["htdigest_file"] = file
88 application = make_app(*args, **kwargs)
99 else:
1010 application = make_app(*args, **kwargs)
55 from .wsgi_autoreloading import make_autoreloading_app
66
77
8 if 'KLAUS_REPOS' in os.environ:
9 warnings.warn("use KLAUS_REPOS_ROOT instead of KLAUS_REPOS for the autoreloader apps", DeprecationWarning)
8 if "KLAUS_REPOS" in os.environ:
9 warnings.warn(
10 "use KLAUS_REPOS_ROOT instead of KLAUS_REPOS for the autoreloader apps",
11 DeprecationWarning,
12 )
1013
1114 args, kwargs = get_args_from_env()
12 repos_root = os.environ.get('KLAUS_REPOS_ROOT') or os.environ['KLAUS_REPOS']
15 repos_root = os.environ.get("KLAUS_REPOS_ROOT") or os.environ["KLAUS_REPOS"]
1316 args = (repos_root,) + args[1:]
1417
15 if kwargs['htdigest_file']:
18 if kwargs["htdigest_file"]:
1619 # Cache the contents of the htdigest file, the application will not read
1720 # the file like object until later when called.
18 with io.open(kwargs['htdigest_file'], encoding='utf-8') as htdigest_file:
19 kwargs['htdigest_file'] = io.StringIO(htdigest_file.read())
21 with io.open(kwargs["htdigest_file"], encoding="utf-8") as htdigest_file:
22 kwargs["htdigest_file"] = io.StringIO(htdigest_file.read())
2023
2124 application = make_autoreloading_app(*args, **kwargs)
1717 Polls `dir` for changes every `interval` seconds and sets `should_reload`
1818 accordingly.
1919 """
20 glob_pattern = dir + '/*'
20 glob_pattern = dir + "/*"
2121 old_contents = glob.glob(glob_pattern)
2222 while 1:
2323 time.sleep(interval)
3636 if S.should_reload:
3737 # Refresh inner application with new repo list
3838 print("Reloading repository list...")
39 S.inner_app = make_app(glob.glob(repos_root + '/*'), *args, **kwargs)
39 S.inner_app = make_app(glob.glob(repos_root + "/*"), *args, **kwargs)
4040 S.should_reload = False
4141 return S.inner_app(environ, start_response)
4242
4646 poller_thread.start()
4747
4848 return app
49
4545 :return: path to the compressed version of the tagsfile
4646 """
4747 _, compressed_tagsfile_path = tempfile.mkstemp()
48 with open(uncompressed_tagsfile_path, 'rb') as uncompressed:
49 with gzip.open(compressed_tagsfile_path, 'wb', COMPRESSION_LEVEL) as compressed:
48 with open(uncompressed_tagsfile_path, "rb") as uncompressed:
49 with gzip.open(compressed_tagsfile_path, "wb", COMPRESSION_LEVEL) as compressed:
5050 shutil.copyfileobj(uncompressed, compressed)
5151 return compressed_tagsfile_path
5252
5757 :return: path to the uncompressed version of the tagsfile
5858 """
5959 _, uncompressed_tagsfile_path = tempfile.mkstemp()
60 with gzip.open(compressed_tagsfile_path, 'rb') as compressed:
61 with open(uncompressed_tagsfile_path, 'wb') as uncompressed:
60 with gzip.open(compressed_tagsfile_path, "rb") as compressed:
61 with open(uncompressed_tagsfile_path, "wb") as uncompressed:
6262 shutil.copyfileobj(compressed, uncompressed)
6363 return uncompressed_tagsfile_path
6464
6565
6666 MiB = 1024 * 1024
67
6768
6869 class CTagsCache(object):
6970 """A ctags cache. Both uncompressed and compressed entries are kept in
8384 - When the tagsfile is requested and it's in the compressed cache sector,
8485 it is moved back to the uncompressed sector prior to using it.
8586 """
86 def __init__(self, uncompressed_max_bytes=30*MiB, compressed_max_bytes=20*MiB):
87
88 def __init__(self, uncompressed_max_bytes=30 * MiB, compressed_max_bytes=20 * MiB):
8789 self.uncompressed_max_bytes = uncompressed_max_bytes
8890 self.compressed_max_bytes = compressed_max_bytes
8991 # Note: We use dulwich's LRU cache to store the tagsfile paths here,
9092 # but we could easily replace it by any other (LRU) cache implementation.
91 self._uncompressed_cache = LRUSizeCache(uncompressed_max_bytes, compute_size=os.path.getsize)
92 self._compressed_cache = LRUSizeCache(compressed_max_bytes, compute_size=os.path.getsize)
93 self._uncompressed_cache = LRUSizeCache(
94 uncompressed_max_bytes, compute_size=os.path.getsize
95 )
96 self._compressed_cache = LRUSizeCache(
97 compressed_max_bytes, compute_size=os.path.getsize
98 )
9399 self._clearing = False
94100 self._lock = threading.Lock()
95101
127133
128134 if git_rev in self._compressed_cache:
129135 compressed_tagsfile_path = self._compressed_cache[git_rev]
130 uncompressed_tagsfile_path = uncompress_tagsfile(compressed_tagsfile_path)
131 self._compressed_cache._remove_node(self._compressed_cache._cache[git_rev])
136 uncompressed_tagsfile_path = uncompress_tagsfile(
137 compressed_tagsfile_path
138 )
139 self._compressed_cache._remove_node(
140 self._compressed_cache._cache[git_rev]
141 )
132142 else:
133143 # Not in cache.
134144 uncompressed_tagsfile_path = create_tagsfile(git_repo_path, git_rev)
135 self._uncompressed_cache.add(git_rev, uncompressed_tagsfile_path,
136 self._clear_uncompressed_entry)
145 self._uncompressed_cache.add(
146 git_rev, uncompressed_tagsfile_path, self._clear_uncompressed_entry
147 )
137148 return uncompressed_tagsfile_path
138149
139150 def _clear_uncompressed_entry(self, git_rev, uncompressed_tagsfile_path):
148159 if not self._clearing:
149160 # If we're clearing the whole cache, don't waste time moving tagsfiles
150161 # from uncompressed to compressed cache, but remove them directly instead.
151 self._compressed_cache.add(git_rev, compress_tagsfile(uncompressed_tagsfile_path),
152 self._clear_compressed_entry)
162 self._compressed_cache.add(
163 git_rev,
164 compress_tagsfile(uncompressed_tagsfile_path),
165 self._clear_compressed_entry,
166 )
153167 delete_tagsfile(uncompressed_tagsfile_path)
154168
155169 def _clear_compressed_entry(self, git_rev, compressed_tagsfile_path):
77 def check_have_exuberant_ctags():
88 """Check that the 'ctags' binary is *Exuberant* ctags (not etags etc)"""
99 try:
10 return b"Exuberant" in subprocess.check_output(["ctags", "--version"], stderr=subprocess.PIPE)
10 return b"Exuberant" in subprocess.check_output(
11 ["ctags", "--version"], stderr=subprocess.PIPE
12 )
1113 except subprocess.CalledProcessError:
1214 return False
1315
2022
2123 :return: path to the generated tagsfile
2224 """
23 assert check_have_exuberant_ctags(), "'ctags' binary is missing or not *Exuberant* ctags"
25 assert (
26 check_have_exuberant_ctags()
27 ), "'ctags' binary is missing or not *Exuberant* ctags"
2428
2529 _, target_tagsfile = tempfile.mkstemp()
2630 checkout_tmpdir = tempfile.mkdtemp()
2731 try:
28 subprocess.check_call(["git", "clone", "-q", "--shared", git_repo_path, checkout_tmpdir])
32 subprocess.check_call(
33 ["git", "clone", "-q", "--shared", git_repo_path, checkout_tmpdir]
34 )
2935 subprocess.check_call(["git", "checkout", "-q", git_rev], cwd=checkout_tmpdir)
30 subprocess.check_call(["ctags", "--fields=+l", "-Rno", target_tagsfile], cwd=checkout_tmpdir)
36 subprocess.check_call(
37 ["ctags", "--fields=+l", "-Rno", target_tagsfile], cwd=checkout_tmpdir
38 )
3139 finally:
3240 shutil.rmtree(checkout_tmpdir)
3341 return target_tagsfile
2424 end -= 1
2525 end += 1
2626 if start or end:
27
2728 def do(l, tag):
2829 last = end + len(l)
29 return b''.join(
30 [l[:start], b'<', tag, b'>', l[start:last], b'</', tag, b'>',
31 l[last:]])
32 old_line = do(old_line, b'del')
33 new_line = do(new_line, b'ins')
30 return b"".join(
31 [l[:start], b"<", tag, b">", l[start:last], b"</", tag, b">", l[last:]]
32 )
33
34 old_line = do(old_line, b"del")
35 new_line = do(new_line, b"ins")
3436 return old_line, new_line
3537
3638
3941 actions = []
4042 chunks = []
4143 for group in SequenceMatcher(None, a, b).get_grouped_opcodes(n):
42 old_line, old_end, new_line, new_end = group[0][1], group[-1][2], group[0][3], group[-1][4]
44 old_line, old_end, new_line, new_end = (
45 group[0][1],
46 group[-1][2],
47 group[0][3],
48 group[-1][4],
49 )
4350 lines = []
51
4452 def add_line(old_lineno, new_lineno, action, line):
4553 actions.append(action)
46 lines.append({
47 'old_lineno': old_lineno,
48 'new_lineno': new_lineno,
49 'action': action,
50 'line': line,
51 'no_newline': not line.endswith(b'\n')
52 })
54 lines.append(
55 {
56 "old_lineno": old_lineno,
57 "new_lineno": new_lineno,
58 "action": action,
59 "line": line,
60 "no_newline": not line.endswith(b"\n"),
61 }
62 )
63
5364 chunks.append(lines)
5465 for tag, i1, i2, j1, j2 in group:
55 if tag == 'equal':
66 if tag == "equal":
5667 for c, line in enumerate(a[i1:i2]):
57 add_line(i1+c, j1+c, 'unmod', e(line))
58 elif tag == 'insert':
68 add_line(i1 + c, j1 + c, "unmod", e(line))
69 elif tag == "insert":
5970 for c, line in enumerate(b[j1:j2]):
60 add_line(None, j1+c, 'add', e(line))
61 elif tag == 'delete':
71 add_line(None, j1 + c, "add", e(line))
72 elif tag == "delete":
6273 for c, line in enumerate(a[i1:i2]):
63 add_line(i1+c, None, 'del', e(line))
64 elif tag == 'replace':
74 add_line(i1 + c, None, "del", e(line))
75 elif tag == "replace":
6576 for c, line in enumerate(a[i1:i2]):
66 add_line(i1+c, None, 'del', e(line))
77 add_line(i1 + c, None, "del", e(line))
6778 for c, line in enumerate(b[j1:j2]):
68 add_line(None, j1+c, 'add', e(line))
79 add_line(None, j1 + c, "add", e(line))
6980 else:
70 raise AssertionError('unknown tag %s' % tag)
81 raise AssertionError("unknown tag %s" % tag)
7182
72 return actions.count('add'), actions.count('del'), chunks
83 return actions.count("add"), actions.count("del"), chunks
00 from six.moves import filter
11
22 from pygments import highlight
3 from pygments.lexers import get_lexer_by_name, get_lexer_for_filename, \
4 guess_lexer, ClassNotFound, TextLexer
3 from pygments.lexers import (
4 get_lexer_by_name,
5 get_lexer_for_filename,
6 guess_lexer,
7 ClassNotFound,
8 TextLexer,
9 )
510 from pygments.formatters import HtmlFormatter
611
712 from klaus import markup
1318 "REXX Ruby SML SQL Scheme Sh Tcl Tex VHDL Verilog Vim"
1419 # Not supported by Pygments: Asp Ant BETA Flex SLang Vera YACC
1520 ).split()
16 PYGMENTS_CTAGS_LANGUAGE_MAP = dict((get_lexer_by_name(l).name, l) for l in CTAGS_SUPPORTED_LANGUAGES)
21 PYGMENTS_CTAGS_LANGUAGE_MAP = dict(
22 (get_lexer_by_name(l).name, l) for l in CTAGS_SUPPORTED_LANGUAGES
23 )
1724
1825
1926 class KlausDefaultFormatter(HtmlFormatter):
2027 def __init__(self, language, ctags, **kwargs):
21 HtmlFormatter.__init__(self, linenos='table', lineanchors='L',
22 linespans='L', anchorlinenos=True, **kwargs)
28 HtmlFormatter.__init__(
29 self,
30 linenos="table",
31 lineanchors="L",
32 linespans="L",
33 anchorlinenos=True,
34 **kwargs
35 )
2336 self.language = language
2437 if ctags:
2538 # Use Pygments' ctags system but provide our own CTags instance
3043 for tag, line in HtmlFormatter._format_lines(self, tokensource):
3144 if tag == 1:
3245 # sourcecode line
33 line = '<span class=line>%s</span>' % line
46 line = "<span class=line>%s</span>" % line
3447 yield tag, line
3548
3649 def _lookup_ctag(self, token):
3952 if not best_matches:
4053 return None, None
4154 else:
42 return (best_matches[0]['file'].decode("utf-8"),
43 best_matches[0]['lineNumber'])
55 return (
56 best_matches[0]["file"].decode("utf-8"),
57 best_matches[0]["lineNumber"],
58 )
4459
4560 def _get_all_ctags_matches(self, token):
46 FIELDS = ('file', 'lineNumber', 'kind', b'language')
61 FIELDS = ("file", "lineNumber", "kind", b"language")
4762 from ctags import TagEntry
63
4864 entry = TagEntry() # target "buffer" for ctags
4965 if self._ctags.find(entry, token.encode("utf-8"), 0):
5066 yield dict((k, entry[k]) for k in FIELDS)
5571 if self.language is None:
5672 return matches
5773 else:
58 return filter(lambda match: match[b'language'] == self.language.encode("utf-8"), matches)
74 return filter(
75 lambda match: match[b"language"] == self.language.encode("utf-8"),
76 matches,
77 )
5978
6079
6180 class KlausPythonFormatter(KlausDefaultFormatter):
6887 # import of the tag in some other file. We change the tag lookup mechanics
6988 # so that non-import matches are always preferred over import matches.
7089 return filter(
71 lambda match: match['kind'] != b'i',
72 super(KlausPythonFormatter, self).get_best_ctags_matches(matches)
90 lambda match: match["kind"] != b"i",
91 super(KlausPythonFormatter, self).get_best_ctags_matches(matches),
7392 )
7493
7594
76 def highlight_or_render(code, filename, render_markup=True, ctags=None, ctags_baseurl=None):
95 def highlight_or_render(
96 code, filename, render_markup=True, ctags=None, ctags_baseurl=None
97 ):
7798 """Render code using Pygments, markup (markdown, rst, ...) using the
7899 corresponding renderer, if available.
79100
95116 lexer = TextLexer()
96117
97118 formatter_cls = {
98 'Python': KlausPythonFormatter,
119 "Python": KlausPythonFormatter,
99120 }.get(lexer.name, KlausDefaultFormatter)
100121 if ctags:
101122 ctags_urlscheme = ctags_baseurl + "%(path)s%(fname)s%(fext)s"
2727 return
2828
2929 def render_markdown(content):
30 return markdown.markdown(content, extensions=['toc', 'extra'])
30 return markdown.markdown(content, extensions=["toc", "extra"])
3131
32 LANGUAGES.append((['.md', '.mkdn', '.mdwn', '.markdown'], render_markdown))
32 LANGUAGES.append(([".md", ".mkdn", ".mdwn", ".markdown"], render_markdown))
3333
3434
3535 def _load_restructured_text():
4242 def render_rest(content):
4343 # start by h2 and ignore invalid directives and so on
4444 # (most likely from Sphinx)
45 settings = {'initial_header_level': 2, 'report_level': 0}
46 return publish_parts(content,
47 writer=Writer(),
48 settings_overrides=settings).get('html_body')
45 settings = {"initial_header_level": 2, "report_level": 0}
46 return publish_parts(content, writer=Writer(), settings_overrides=settings).get(
47 "html_body"
48 )
4949
50 LANGUAGES.append((['.rst', '.rest'], render_rest))
50 LANGUAGES.append(([".rst", ".rest"], render_rest))
5151
5252
5353 for loader in [_load_markdown, _load_restructured_text]:
88 from dulwich.errors import NotTreeError
99 import dulwich, dulwich.patch
1010
11 from klaus.utils import force_unicode, parent_directory, repo_human_name, \
12 encode_for_git, decode_from_git
11 from klaus.utils import (
12 force_unicode,
13 parent_directory,
14 repo_human_name,
15 encode_for_git,
16 decode_from_git,
17 )
1318 from klaus.diff import render_diff
1419
1520
16 NOT_SET = '__not_set__'
21 NOT_SET = "__not_set__"
1722
1823
1924 def cached_call(key, validator, producer, _cache={}):
2631
2732 class FancyRepo(dulwich.repo.Repo):
2833 """A wrapper around Dulwich's Repo that adds some helper methods."""
34
2935 @property
3036 def name(self):
3137 return repo_human_name(self.path)
3743 # If self.get_refs() has changed, we should invalidate the cache.
3844 all_refs = self.get_refs()
3945 return cached_call(
40 key=(id(self), 'get_last_updated_at'),
46 key=(id(self), "get_last_updated_at"),
4147 validator=all_refs,
42 producer=lambda: self._get_last_updated_at(all_refs)
48 producer=lambda: self._get_last_updated_at(all_refs),
4349 )
4450
4551 def _get_last_updated_at(self, all_refs):
5157 # Whoops. The ref points at a non-existant object
5258 pass
5359 resolveable_refs.sort(
54 key=lambda obj:getattr(obj, 'commit_time', float('-inf')),
55 reverse=True
60 key=lambda obj: getattr(obj, "commit_time", float("-inf")), reverse=True
5661 )
5762 for ref in resolveable_refs:
5863 # Find the latest ref that has a commit_time; tags do not
6469 @property
6570 def cloneurl(self):
6671 """Retrieve the gitweb notion of the public clone URL of this repo."""
67 f = self.get_named_file('cloneurl')
72 f = self.get_named_file("cloneurl")
6873 if f is not None:
6974 return f.read()
7075 c = self.get_config()
7176 try:
72 return force_unicode(c.get(b'gitweb', b'url'))
77 return force_unicode(c.get(b"gitweb", b"url"))
7378 except KeyError:
7479 return None
7580
7984 """
8085 # Cache result to speed up repo_list.html template.
8186 # If description file mtime has changed, we should invalidate the cache.
82 description_file = os.path.join(self._controldir, 'description')
87 description_file = os.path.join(self._controldir, "description")
8388 try:
84 description_mtime = os.stat(os.path.join(self._controldir, 'description')).st_mtime
89 description_mtime = os.stat(
90 os.path.join(self._controldir, "description")
91 ).st_mtime
8592 except OSError:
8693 description_mtime = None
8794
8895 return cached_call(
89 key=(id(self), 'get_description'),
96 key=(id(self), "get_description"),
9097 validator=description_mtime,
91 producer=self._get_description
98 producer=self._get_description,
9299 )
93100
94101 def _get_description(self):
100107
101108 def get_commit(self, rev):
102109 """Get commit object identified by `rev` (SHA or branch or tag name)."""
103 for prefix in ['refs/heads/', 'refs/tags/', '']:
110 for prefix in ["refs/heads/", "refs/tags/", ""]:
104111 key = prefix + rev
105112 try:
106113 obj = self[encode_for_git(key)]
113120
114121 def get_default_branch(self):
115122 """Tries to guess the default repo branch name."""
116 for candidate in ['master', 'trunk', 'default', 'gh-pages']:
123 for candidate in ["master", "trunk", "default", "gh-pages"]:
117124 try:
118125 self.get_commit(candidate)
119126 return candidate
128135 """Return a list of ref names that begin with `prefix`, ordered by the
129136 time they have been committed to last.
130137 """
138
131139 def get_commit_time(refname):
132140 try:
133141 obj = self[refs[refname]]
149157 """Return a list of branch names of this repo, ordered by the time they
150158 have been committed to last.
151159 """
152 return self.get_ref_names_ordered_by_last_commit('refs/heads', exclude)
160 return self.get_ref_names_ordered_by_last_commit("refs/heads", exclude)
153161
154162 def get_tag_names(self):
155163 """Return a list of tag names of this repo, ordered by creation time."""
156 return self.get_ref_names_ordered_by_last_commit('refs/tags')
164 return self.get_ref_names_ordered_by_last_commit("refs/tags")
157165
158166 def get_tag_and_branch_shas(self):
159167 """Return a list of SHAs of all tags and branches."""
160 tag_shas = self.refs.as_dict(b'refs/tags/').values()
161 branch_shas = self.refs.as_dict(b'refs/heads/').values()
168 tag_shas = self.refs.as_dict(b"refs/tags/").values()
169 branch_shas = self.refs.as_dict(b"refs/heads/").values()
162170 return set(tag_shas) | set(branch_shas)
163171
164172 def history(self, commit, path=None, max_commits=None, skip=0):
175183 # Therefore we use `git log` here until dulwich gets faster.
176184 # For the pure-Python implementation, see the 'purepy-hist' branch.
177185
178 cmd = ['git', 'log', '--format=%H']
186 cmd = ["git", "log", "--format=%H"]
179187 if skip:
180 cmd.append('--skip=%d' % skip)
188 cmd.append("--skip=%d" % skip)
181189 if max_commits:
182 cmd.append('--max-count=%d' % max_commits)
190 cmd.append("--max-count=%d" % max_commits)
183191 cmd.append(decode_from_git(commit.id))
184192 if path:
185 cmd.extend(['--', path])
193 cmd.extend(["--", path])
186194
187195 output = subprocess.check_output(cmd, cwd=os.path.abspath(self.path))
188 sha1_sums = output.strip().split(b'\n')
196 sha1_sums = output.strip().split(b"\n")
189197 return [self[sha1] for sha1 in sha1_sums]
190198
191199 def blame(self, commit, path):
193201 the file, the list contains the commit that last changed that line.
194202 """
195203 # XXX see comment in `.history()`
196 cmd = ['git', 'blame', '-ls', '--root', decode_from_git(commit.id), '--', path]
204 cmd = ["git", "blame", "-ls", "--root", decode_from_git(commit.id), "--", path]
197205 output = subprocess.check_output(cmd, cwd=os.path.abspath(self.path))
198 sha1_sums = [line[:40] for line in output.strip().split(b'\n')]
199 return [None if self[sha1] is None else decode_from_git(self[sha1].id) for sha1 in sha1_sums]
206 sha1_sums = [line[:40] for line in output.strip().split(b"\n")]
207 return [
208 None if self[sha1] is None else decode_from_git(self[sha1].id)
209 for sha1 in sha1_sums
210 ]
200211
201212 def get_blob_or_tree(self, commit, path):
202213 """Return the Git tree or blob object for `path` at `commit`."""
203214 try:
204 (mode, oid) = tree_lookup_path(self.__getitem__, commit.tree,
205 encode_for_git(path))
215 (mode, oid) = tree_lookup_path(
216 self.__getitem__, commit.tree, encode_for_git(path)
217 )
206218 except NotTreeError:
207219 # Some part of the path was a file where a folder was expected.
208220 # Example: path="/path/to/foo.txt" but "to" is a file in "/path".
233245 dirs.sort(key=keyfunc)
234246
235247 if path:
236 dirs.insert(0, ('..', parent_directory(path)))
237
238 return {'submodules': submodules, 'dirs' : dirs, 'files' : files}
248 dirs.insert(0, ("..", parent_directory(path)))
249
250 return {"submodules": submodules, "dirs": dirs, "files": files}
239251
240252 def commit_diff(self, commit):
241253 """Return the list of changes introduced by `commit`."""
246258 else:
247259 parent_tree = None
248260
249 summary = {'nfiles': 0, 'nadditions': 0, 'ndeletions': 0}
261 summary = {"nfiles": 0, "nadditions": 0, "ndeletions": 0}
250262 file_changes = [] # the changes in detail
251263
252264 dulwich_changes = self.object_store.tree_changes(parent_tree, commit.tree)
253265 for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in dulwich_changes:
254 summary['nfiles'] += 1
255 try:
256 oldblob = self.object_store[oldsha] if oldsha else Blob.from_string(b'')
257 newblob = self.object_store[newsha] if newsha else Blob.from_string(b'')
266 summary["nfiles"] += 1
267 try:
268 oldblob = self.object_store[oldsha] if oldsha else Blob.from_string(b"")
269 newblob = self.object_store[newsha] if newsha else Blob.from_string(b"")
258270 except KeyError:
259271 # newsha/oldsha are probably related to submodules.
260272 # Dulwich will handle that.
261273 pass
262274
263275 # Check for binary files -- can't show diffs for these
264 if guess_is_binary(newblob) or \
265 guess_is_binary(oldblob):
266 file_changes.append({
267 'is_binary': True,
268 'old_filename': oldpath or '/dev/null',
269 'new_filename': newpath or '/dev/null',
270 'chunks': None
271 })
276 if guess_is_binary(newblob) or guess_is_binary(oldblob):
277 file_changes.append(
278 {
279 "is_binary": True,
280 "old_filename": oldpath or "/dev/null",
281 "new_filename": newpath or "/dev/null",
282 "chunks": None,
283 }
284 )
272285 continue
273286
274287 additions, deletions, chunks = render_diff(
275 oldblob.splitlines(), newblob.splitlines())
288 oldblob.splitlines(), newblob.splitlines()
289 )
276290 change = {
277 'is_binary': False,
278 'old_filename': oldpath or '/dev/null',
279 'new_filename': newpath or '/dev/null',
280 'chunks': chunks,
281 'additions': additions,
282 'deletions': deletions,
291 "is_binary": False,
292 "old_filename": oldpath or "/dev/null",
293 "new_filename": newpath or "/dev/null",
294 "chunks": chunks,
295 "additions": additions,
296 "deletions": deletions,
283297 }
284 summary['nadditions'] += additions
285 summary['ndeletions'] += deletions
298 summary["nadditions"] += additions
299 summary["ndeletions"] += deletions
286300 file_changes.append(change)
287301
288302 return summary, file_changes
293307 else:
294308 parent_tree = None
295309 bytesio = io.BytesIO()
296 dulwich.patch.write_tree_diff(bytesio, self.object_store, parent_tree, commit.tree)
310 dulwich.patch.write_tree_diff(
311 bytesio, self.object_store, parent_tree, commit.tree
312 )
297313 return bytesio.getvalue()
298314
299315 def freeze(self):
304320 """A special version of FancyRepo that assumes the underlying Git
305321 repository does not change. Used for performance optimizations.
306322 """
323
307324 def __init__(self, repo):
308325 self.__repo = repo
309326 self.__last_updated_at = NOT_SET
310327
311328 def __setattr__(self, name, value):
312 if not name.startswith('_FrozenFancyRepo__'):
329 if not name.startswith("_FrozenFancyRepo__"):
313330 raise TypeError("Can't set %s attribute on FrozenFancyRepo" % name)
314331 super(FrozenFancyRepo, self).__setattr__(name, value)
315332
324341
class InvalidRepo:
    """Represent an invalid repository and store pertinent data."""

    def __init__(self, path):
        """Remember the filesystem location of the broken repository."""
        self.path = path
329347
88 import warnings
99 import subprocess
1010 import six
11
1112 try:
1213 import chardet
1314 except ImportError:
4748 :param app: the WSGI application
4849 :param num_proxies: the number of proxy servers in front of the app.
4950 """
51
5052 def __call__(self, environ, start_response):
51 script_name = environ.get('HTTP_X_SCRIPT_NAME')
53 script_name = environ.get("HTTP_X_SCRIPT_NAME")
5254 if script_name is not None:
53 if script_name.endswith('/'):
54 warnings.warn(
55 "'X-Script-Name' header should not end in '/' (found: %r). "
56 "Please fix your proxy's configuration." % script_name)
57 script_name = script_name.rstrip('/')
58 environ['SCRIPT_NAME'] = script_name
55 if script_name.endswith("/"):
56 warnings.warn(
57 "'X-Script-Name' header should not end in '/' (found: %r). "
58 "Please fix your proxy's configuration." % script_name
59 )
60 script_name = script_name.rstrip("/")
61 environ["SCRIPT_NAME"] = script_name
5962 return super(ProxyFix, self).__call__(environ, start_response)
6063
6164
7578
7679 Snippet stolen from http://flask.pocoo.org/snippets/35/
7780 """
81
7882 def __init__(self, app):
7983 warnings.warn(
8084 "'klaus.utils.SubUri' is deprecated and will be removed. "
8185 "Please upgrade your code to use 'klaus.utils.ProxyFix' instead.",
82 DeprecationWarning
86 DeprecationWarning,
8387 )
8488 self.app = app
8589
8690 def __call__(self, environ, start_response):
87 script_name = environ.get('HTTP_X_SCRIPT_NAME', '')
91 script_name = environ.get("HTTP_X_SCRIPT_NAME", "")
8892 if script_name:
89 environ['SCRIPT_NAME'] = script_name.rstrip('/')
90
91 if script_name and environ['PATH_INFO'].startswith(script_name):
93 environ["SCRIPT_NAME"] = script_name.rstrip("/")
94
95 if script_name and environ["PATH_INFO"].startswith(script_name):
9296 # strip `script_name` from PATH_INFO
93 environ['PATH_INFO'] = environ['PATH_INFO'][len(script_name):]
94
95 if 'HTTP_X_SCHEME' in environ:
96 environ['wsgi.url_scheme'] = environ['HTTP_X_SCHEME']
97 environ["PATH_INFO"] = environ["PATH_INFO"][len(script_name) :]
98
99 if "HTTP_X_SCHEME" in environ:
100 environ["wsgi.url_scheme"] = environ["HTTP_X_SCHEME"]
97101
98102 return self.app(environ, start_response)
99103
104108
105109
def formattimestamp(timestamp):
    """Format a Unix timestamp like 'Jan 01, 2020 12:34:56' (local time)."""
    moment = datetime.datetime.fromtimestamp(timestamp)
    return moment.strftime("%b %d, %Y %H:%M:%S")
108112
109113
def guess_is_binary(dulwich_blob):
    """Guess whether a dulwich blob holds binary data (i.e. has a NUL byte)."""
    for chunk in dulwich_blob.chunked:
        if b"\0" in chunk:
            return True
    return False
112116
113117
def guess_is_image(filename):
    """Return True if `filename`'s extension maps to an 'image/*' MIME type."""
    mime, _ = mimetypes.guess_type(filename)
    return mime is not None and mime.startswith("image/")
119123
120124
def encode_for_git(s):
    """Encode a text string into the bytes that Git/dulwich APIs expect."""
    # XXX This assumes everything to be UTF-8 encoded
    return s.encode("utf8")
124128
125129
def decode_from_git(b):
    """Decode bytes coming from Git/dulwich into a text string."""
    # XXX This assumes everything to be UTF-8 encoded
    return b.decode("utf8")
129133
130134
131135 def force_unicode(s):
137141 last_exc = None
138142 # Try some default encodings:
139143 try:
140 return s.decode('utf-8')
144 return s.decode("utf-8")
141145 except UnicodeDecodeError as exc:
142146 last_exc = exc
143147 try:
147151
148152 if chardet is not None:
149153 # Try chardet, if available
150 encoding = chardet.detect(s)['encoding']
154 encoding = chardet.detect(s)["encoding"]
151155 if encoding is not None:
152156 return s.decode(encoding)
153157
163167 >>> extract_author_name("noname@example.com")
164168 "noname@example.com"
165169 """
166 match = re.match('^(.*?)<.*?>$', email)
170 match = re.match("^(.*?)<.*?>$", email)
167171 if match:
168172 return match.group(1).strip()
169173 return email
171175
172176 def is_hex_prefix(s):
173177 if len(s) % 2:
174 s += '0'
178 s += "0"
175179 try:
176180 binascii.unhexlify(s)
177181 return True
196200 [('foo', 'foo'), ('bar', 'foo/bar'), ('spam', 'foo/bar/spam')]
197201 """
198202 seen = []
199 for part in path.split('/'):
203 for part in path.split("/"):
200204 seen.append(part)
201 yield part, '/'.join(seen)
205 yield part, "/".join(seen)
202206
203207
def shorten_message(msg):
    """Return only the summary line (first line) of a commit message."""
    first_line, _, _ = msg.partition("\n")
    return first_line
206210
207211
208212 def replace_dupes(ls, replacement):
229233 This is used to display the "powered by klaus $VERSION" footer on each page,
230234 $VERSION being either the SHA guessed by this function or the latest release number.
231235 """
232 git_dir = os.path.join(os.path.dirname(__file__), '..', '.git')
233 try:
234 return force_unicode(subprocess.check_output(
235 ['git', 'log', '--format=%h', '-n', '1'],
236 cwd=git_dir
237 ).strip())
236 git_dir = os.path.join(os.path.dirname(__file__), "..", ".git")
237 try:
238 return force_unicode(
239 subprocess.check_output(
240 ["git", "log", "--format=%h", "-n", "1"], cwd=git_dir
241 ).strip()
242 )
238243 except OSError:
239244 # Either the git executable couldn't be found in the OS's PATH
240245 # or no ".git" directory exists, i.e. this is no "bleeding-edge" installation.
241246 return None
242247
243248
def sanitize_branch_name(name, chars="./", repl="-"):
    """Replace each character of `chars` found in `name` with `repl`."""
    sanitized = name
    for forbidden in chars:
        sanitized = sanitized.replace(forbidden, repl)
    return sanitized
248253
249254
def escape_html(s):
    """Escape the four HTML-special characters in the bytestring `s`."""
    # '&' must be replaced first so entities inserted later stay intact.
    replacements = (
        (b"&", b"&amp;"),
        (b"<", b"&lt;"),
        (b">", b"&gt;"),
        (b'"', b"&quot;"),
    )
    for raw, entity in replacements:
        s = s.replace(raw, entity)
    return s
253262
254263
255264 def tarball_basename(repo_name, rev):
256265 """Determine the name for a tarball."""
257 rev = sanitize_branch_name(rev, chars='/')
258 if rev.startswith(repo_name + '-'):
266 rev = sanitize_branch_name(rev, chars="/")
267 if rev.startswith(repo_name + "-"):
259268 # If the rev is a tag name that already starts with the repo name,
260269 # skip it.
261270 return rev
262 elif len(rev) >= 2 and rev[0] == 'v' and not rev[1].isalpha():
271 elif len(rev) >= 2 and rev[0] == "v" and not rev[1].isalpha():
263272 # If the rev is a tag name prefixed by a 'v', skip the 'v'.
264273 # So, v-1.0 -> 1.0, v1.0 -> 1.0, but vanilla -> vanilla.
265274 return "%s-%s" % (repo_name, rev[1:])
278287 3. /x/y -> y
279288 """
280289 name = path.rstrip(os.sep).split(os.sep)[-1]
281 if name.endswith('.git'):
290 if name.endswith(".git"):
282291 name = name[:-4]
283292 return name
1818 ctags = None
1919 else:
2020 from klaus import ctagscache
21
2122 CTAGS_CACHE = ctagscache.CTagsCache()
2223
2324 from klaus import markup
2425 from klaus.highlighting import highlight_or_render
25 from klaus.utils import parent_directory, subpaths, force_unicode, guess_is_binary, \
26 guess_is_image, replace_dupes, sanitize_branch_name, encode_for_git
27
28
29 README_FILENAMES = [b'README', b'README.md', b'README.mkdn', b'README.mdwn', b'README.markdown', b'README.rst']
26 from klaus.utils import (
27 parent_directory,
28 subpaths,
29 force_unicode,
30 guess_is_binary,
31 guess_is_image,
32 replace_dupes,
33 sanitize_branch_name,
34 encode_for_git,
35 )
36
37
38 README_FILENAMES = [
39 b"README",
40 b"README.md",
41 b"README.mkdn",
42 b"README.mdwn",
43 b"README.markdown",
44 b"README.rst",
45 ]
3046
3147
def repo_list():
    """Show a list of all repos and can be sorted by last update."""
    if "by-name" in request.args:
        order_by = "name"
        sort_key = lambda repo: repo.name
    else:
        # Most recently updated first; repos without a known update time last.
        order_by = "last_updated"
        sort_key = lambda repo: (-(repo.fast_get_last_updated_at() or -1), repo.name)
    valid = sorted(
        (repo.freeze() for repo in current_app.valid_repos.values()), key=sort_key
    )
    broken = sorted(current_app.invalid_repos.values(), key=lambda repo: repo.name)
    return render_template(
        "repo_list.html",
        repos=valid,
        invalid_repos=broken,
        order_by=order_by,
        base_href=None,
    )
4669
4770
def robots_txt():
    """Serve the robots.txt file to manage the indexing of the site by search engines."""
    # Delegate to Flask's static-file machinery.
    return current_app.send_static_file("robots.txt")
5174
5275
5376 def _get_repo_and_rev(repo, rev=None, path=None):
82105
def _get_submodule(repo, commit, path):
    """Retrieve submodule URL and path."""
    # Parse the repo's '.gitmodules' file at `commit` to find the entry.
    gitmodules = repo.get_blob_or_tree(commit, ".gitmodules")
    config = dulwich.config.ConfigFile.from_file(
        BytesIO(gitmodules.as_raw_string())
    )
    section = (b"submodule", path)
    return (config.get(section, b"url"), config.get(section, b"path"))
92116
93117
103127 is "/foo/bar", only commits related to "/foo/bar" are displayed, and if
104128 `rev` is "master", the history of the "master" branch is displayed.
105129 """
130
106131 def __init__(self, view_name):
107132 self.view_name = view_name
108133 self.context = {}
109134
110 def dispatch_request(self, repo, rev=None, path=''):
135 def dispatch_request(self, repo, rev=None, path=""):
111136 """Dispatch repository, revision (if any) and path (if any). To retain
112137 compatibility with :func:`url_for`, view routing uses two arguments:
113138 rev and path, although a single path is sufficient (from Git's point of
120145
121146 [1] https://github.com/jonashaag/klaus/issues/36#issuecomment-23990266
122147 """
123 self.make_template_context(repo, rev, path.strip('/'))
148 self.make_template_context(repo, rev, path.strip("/"))
124149 return self.get_response()
125150
126151 def get_response(self):
135160 raise NotFound("File not found")
136161
137162 self.context = {
138 'view': self.view_name,
139 'repo': repo,
140 'rev': rev,
141 'commit': commit,
142 'branches': repo.get_branch_names(exclude=rev),
143 'tags': repo.get_tag_names(),
144 'path': path,
145 'blob_or_tree': blob_or_tree,
146 'subpaths': list(subpaths(path)) if path else None,
147 'base_href': None,
163 "view": self.view_name,
164 "repo": repo,
165 "rev": rev,
166 "commit": commit,
167 "branches": repo.get_branch_names(exclude=rev),
168 "tags": repo.get_tag_names(),
169 "path": path,
170 "blob_or_tree": blob_or_tree,
171 "subpaths": list(subpaths(path)) if path else None,
172 "base_href": None,
148173 }
149174
150175
class CommitView(BaseRepoView):
    # Renders a single commit together with its diff.
    template_name = "view_commit.html"
153178
154179
class PatchView(BaseRepoView):
    def get_response(self):
        """Return the commit's diff as a plain-text patch."""
        repo = self.context["repo"]
        commit = self.context["commit"]
        return Response(repo.raw_commit_diff(commit), mimetype="text/plain")
161186
162187
class TreeViewMixin(object):
    """The logic required for displaying the current directory in the sidebar."""

    def make_template_context(self, *args):
        super(TreeViewMixin, self).make_template_context(*args)
        self.context["root_tree"] = self.listdir()

    def listdir(self):
        """Return a list of directories and files in the current path of the selected commit."""
        return self.context["repo"].listdir(
            self.context["commit"], self.get_root_directory()
        )

    def get_root_directory(self):
        path = self.context["path"]
        if isinstance(self.context["blob_or_tree"], dulwich.objects.Blob):
            # 'path' points at a file, so show its parent folder instead.
            return parent_directory(path)
        return path
184207
185208 class HistoryView(TreeViewMixin, BaseRepoView):
186209 """Show commits of a branch + path, just like `git log`. With pagination."""
187 template_name = 'history.html'
210
211 template_name = "history.html"
188212
189213 def make_template_context(self, *args):
190214 super(HistoryView, self).make_template_context(*args)
191215
192216 try:
193 page = int(request.args.get('page'))
217 page = int(request.args.get("page"))
194218 except (TypeError, ValueError):
195219 page = 0
196220
197 self.context['page'] = page
221 self.context["page"] = page
198222
199223 history_length = 30
200224 if page:
201 skip = (self.context['page']-1) * 30 + 10
225 skip = (self.context["page"] - 1) * 30 + 10
202226 if page > 7:
203 self.context['previous_pages'] = [0, 1, 2, None] + list(range(page))[-3:]
227 self.context["previous_pages"] = [0, 1, 2, None] + list(range(page))[
228 -3:
229 ]
204230 else:
205 self.context['previous_pages'] = range(page)
231 self.context["previous_pages"] = range(page)
206232 else:
207233 skip = 0
208234
209 history = self.context['repo'].history(
210 self.context['commit'],
211 self.context['path'],
212 history_length + 1,
213 skip
235 history = self.context["repo"].history(
236 self.context["commit"], self.context["path"], history_length + 1, skip
214237 )
215238 if len(history) == history_length + 1:
216239 # At least one more commit for next page left
220243 else:
221244 more_commits = False
222245
223 self.context.update({
224 'history': history,
225 'more_commits': more_commits,
226 })
246 self.context.update(
247 {
248 "history": history,
249 "more_commits": more_commits,
250 }
251 )
227252
228253
229254 class IndexView(TreeViewMixin, BaseRepoView):
230255 """Show commits of a branch, just like `git log`.
231256
232257 Also, README, if available."""
233 template_name = 'index.html'
258
259 template_name = "index.html"
234260
235261 def _get_readme(self):
236 tree = self.context['repo'][self.context['commit'].tree]
262 tree = self.context["repo"][self.context["commit"].tree]
237263 for name in README_FILENAMES:
238264 if name in tree:
239 readme_data = self.context['repo'][tree[name][1]].data
265 readme_data = self.context["repo"][tree[name][1]].data
240266 readme_filename = name
241267 return (readme_filename, readme_data)
242268 else:
245271 def make_template_context(self, *args):
246272 super(IndexView, self).make_template_context(*args)
247273
248 self.context['base_href'] = url_for(
249 'blob',
250 repo=self.context['repo'].name,
251 rev=self.context['rev'],
252 path=''
253 )
254
255 self.context['page'] = 0
274 self.context["base_href"] = url_for(
275 "blob", repo=self.context["repo"].name, rev=self.context["rev"], path=""
276 )
277
278 self.context["page"] = 0
256279 history_length = 10
257 history = self.context['repo'].history(
258 self.context['commit'],
259 self.context['path'],
280 history = self.context["repo"].history(
281 self.context["commit"],
282 self.context["path"],
260283 history_length + 1,
261284 skip=0,
262285 )
268291 else:
269292 more_commits = False
270293
271 self.context.update({
272 'history': history,
273 'more_commits': more_commits,
274 })
294 self.context.update(
295 {
296 "history": history,
297 "more_commits": more_commits,
298 }
299 )
275300 try:
276301 (readme_filename, readme_data) = self._get_readme()
277302 except KeyError:
278 self.context.update({
279 'is_markup': None,
280 'rendered_code': None,
281 })
282 else:
283 readme_filename = force_unicode(readme_filename)
303 self.context.update(
304 {
305 "is_markup": None,
306 "rendered_code": None,
307 }
308 )
309 else:
310 readme_filename = force_unicode(readme_filename)
284311 readme_data = force_unicode(readme_data)
285 self.context.update({
286 'is_markup': markup.can_render(readme_filename),
287 'rendered_code': highlight_or_render(readme_data, readme_filename)
288 })
312 self.context.update(
313 {
314 "is_markup": markup.can_render(readme_filename),
315 "rendered_code": highlight_or_render(readme_data, readme_filename),
316 }
317 )
289318
290319
class BaseBlobView(BaseRepoView):
    def make_template_context(self, *args):
        super(BaseBlobView, self).make_template_context(*args)
        blob = self.context["blob_or_tree"]
        if not isinstance(blob, dulwich.objects.Blob):
            raise NotFound("Not a blob")
        self.context["filename"] = os.path.basename(self.context["path"])
297326
298327
299328 class SubmoduleView(BaseRepoView):
300329 """Show an information page about a submodule."""
301 template_name = 'submodule.html'
330
331 template_name = "submodule.html"
302332
303333 def make_template_context(self, repo, rev, path):
304334 repo, rev, path, commit = _get_repo_and_rev(repo, rev, path)
305335
306336 try:
307337 submodule_rev = tree_lookup_path(
308 repo.__getitem__, commit.tree, encode_for_git(path))[1]
338 repo.__getitem__, commit.tree, encode_for_git(path)
339 )[1]
309340 except KeyError:
310341 raise NotFound("Parent path for submodule missing")
311342
312343 try:
313344 (submodule_url, submodule_path) = _get_submodule(
314 repo, commit, encode_for_git(path))
345 repo, commit, encode_for_git(path)
346 )
315347 except KeyError:
316348 submodule_url = None
317349 submodule_path = None
321353 # submodule_path, revision submodule_rev.
322354
323355 self.context = {
324 'view': self.view_name,
325 'repo': repo,
326 'rev': rev,
327 'commit': commit,
328 'branches': repo.get_branch_names(exclude=rev),
329 'tags': repo.get_tag_names(),
330 'path': path,
331 'subpaths': list(subpaths(path)) if path else None,
332 'submodule_url': force_unicode(submodule_url),
333 'submodule_path': force_unicode(submodule_path),
334 'submodule_rev': force_unicode(submodule_rev),
335 'base_href': None,
356 "view": self.view_name,
357 "repo": repo,
358 "rev": rev,
359 "commit": commit,
360 "branches": repo.get_branch_names(exclude=rev),
361 "tags": repo.get_tag_names(),
362 "path": path,
363 "subpaths": list(subpaths(path)) if path else None,
364 "submodule_url": force_unicode(submodule_url),
365 "submodule_path": force_unicode(submodule_path),
366 "submodule_rev": force_unicode(submodule_rev),
367 "base_href": None,
336368 }
337369
338370
class BaseFileView(TreeViewMixin, BaseBlobView):
    """Base for FileView and BlameView."""

    def render_code(self, render_markup):
        """Highlight (or render as markup) the current blob's contents."""
        repo = self.context["repo"]
        commit = self.context["commit"]
        if current_app.should_use_ctags(repo, commit):
            if ctags is None:
                raise ImportError("Ctags enabled but python-ctags not installed")
            ctags_base_url = url_for(
                self.view_name, repo=repo.name, rev=self.context["rev"], path=""
            )
            ctags_tagsfile = CTAGS_CACHE.get_tagsfile(repo.path, commit.id)
            ctags_args = {
                "ctags": ctags.CTags(
                    ctags_tagsfile.encode(sys.getfilesystemencoding())
                ),
                "ctags_baseurl": ctags_base_url,
            }
        else:
            ctags_args = {}

        return highlight_or_render(
            force_unicode(self.context["blob_or_tree"].data),
            self.context["filename"],
            render_markup,
            **ctags_args
        )

    def make_template_context(self, *args):
        super(BaseFileView, self).make_template_context(*args)
        ctx = self.context
        # Optimistic defaults; downgraded below for binary/oversized blobs.
        ctx["can_render"] = True
        ctx["is_binary"] = False
        ctx["too_large"] = False
        ctx["is_markup"] = False

        blob = ctx["blob_or_tree"]
        if guess_is_binary(blob):
            ctx["can_render"] = False
            ctx["is_binary"] = True
            ctx["is_image"] = guess_is_image(ctx["filename"])
        elif sum(len(chunk) for chunk in blob.chunked) > 100 * 1024:
            # Refuse to highlight blobs bigger than 100 KiB.
            ctx["can_render"] = False
            ctx["too_large"] = True
393434
394435
class FileView(BaseFileView):
    """Shows a file rendered using ``pygmentize``."""

    template_name = "view_blob.html"

    def make_template_context(self, *args):
        super(FileView, self).make_template_context(*args)
        if not self.context["can_render"]:
            return
        # Presence of the 'markup' query argument turns markup rendering off.
        render_markup = "markup" not in request.args
        self.context["is_markup"] = markup.can_render(self.context["filename"])
        self.context["render_markup"] = render_markup
        self.context["rendered_code"] = self.render_code(render_markup)
408452
409453
class BlameView(BaseFileView):
    template_name = "blame_blob.html"

    def make_template_context(self, *args):
        super(BlameView, self).make_template_context(*args)
        if not self.context["can_render"]:
            return
        line_commits = self.context["repo"].blame(
            self.context["commit"], self.context["path"]
        )
        # NOTE(review): replace_dupes appears to blank repeated consecutive
        # entries with None (see klaus.utils) -- confirm against its docs.
        replace_dupes(line_commits, None)
        self.context["rendered_code"] = self.render_code(render_markup=False)
        self.context["line_commits"] = line_commits
422470
423471
class RawView(BaseBlobView):
    """Show a single file raw (as if it were a normal filesystem file
    served through a static file server).
    """

    def get_response(self):
        # Explicitly set an empty mimetype. This should work well for most
        # browsers as they do file type recognition anyway.
        # The correct way would be to implement proper file type recognition here.
        blob = self.context["blob_or_tree"]
        return Response(blob.chunked, mimetype="")
433482
434483
class DownloadView(BaseRepoView):
    """Download a repo as a tar.gz file."""

    def get_response(self):
        rev_part = sanitize_branch_name(self.context["rev"])
        basename = "%s@%s" % (self.context["repo"].name, rev_part)
        tarname = basename + ".tar.gz"
        headers = {
            "Content-Disposition": "attachment; filename=%s" % tarname,
            "Cache-Control": "no-store",  # Disables browser caching
        }
        tar_stream = dulwich.archive.tar_stream(
            self.context["repo"],
            self.context["blob_or_tree"],
            self.context["commit"].commit_time,
            format="gz",
            prefix=encode_for_git(basename),
        )
        return Response(tar_stream, mimetype="application/x-tgz", headers=headers)


# Module-level view callables wired into the URL map.
history = HistoryView.as_view("history", "history")
index = IndexView.as_view("index", "index")
commit = CommitView.as_view("commit", "commit")
patch = PatchView.as_view("patch", "patch")
blame = BlameView.as_view("blame", "blame")
blob = FileView.as_view("blob", "blob")
raw = RawView.as_view("raw", "raw")
download = DownloadView.as_view("download", "download")
submodule = SubmoduleView.as_view("submodule", "submodule")
77 # policy "install once, find never". Definitely a TODO!
88 # -- https://groups.google.com/group/comp.lang.python/msg/2105ee4d9e8042cb
99 from distutils.command.install import INSTALL_SCHEMES
10
1011 for scheme in INSTALL_SCHEMES.values():
11 scheme['data'] = scheme['purelib']
12 scheme["data"] = scheme["purelib"]
1213
1314
1415 install_data_files_hack()
1516
# Runtime dependencies; dulwich is pinned below 0.20 for Python < 3.5,
# which newer dulwich releases no longer support.
requires = [
    "six",
    "flask",
    "Werkzeug>=0.15.0",
    "pygments",
    "httpauth",
    "humanize",
    'dulwich>=0.19.3;python_version>="3.5"',
    'dulwich>=0.19.3,<0.20;python_version<"3.5"',
]
1727
1828 setup(
19 name='klaus',
20 version='1.5.2',
21 author='Jonas Haag',
22 author_email='jonas@lophus.org',
23 packages=['klaus', 'klaus.contrib'],
24 scripts=['bin/klaus'],
29 name="klaus",
30 version="1.5.2",
31 author="Jonas Haag",
32 author_email="jonas@lophus.org",
33 packages=["klaus", "klaus.contrib"],
34 scripts=["bin/klaus"],
2535 include_package_data=True,
2636 zip_safe=False,
27 url='https://github.com/jonashaag/klaus',
28 description='The first Git web viewer that Just Works™.',
37 url="https://github.com/jonashaag/klaus",
38 description="The first Git web viewer that Just Works™.",
2939 long_description=__doc__,
3040 classifiers=[
3141 "Development Status :: 5 - Production/Stable",
4050 ],
4151 install_requires=requires,
4252 )
43
def test_dont_show_blame_link():
    """Binary, image, and oversized files must not offer a blame link."""
    with serve():
        for filename in ["binary", "image.jpg", "toolarge"]:
            url = TEST_REPO_DONT_RENDER_URL + "blob/HEAD/" + filename
            body = requests.get(url).text
            assert "blame" not in body
1517
1618
1820 """Don't render blame even if someone navigated to the blame site by accident."""
1921 with serve():
2022 for file in ["binary", "image.jpg", "toolarge"]:
21 response = requests.get(TEST_REPO_DONT_RENDER_URL + "blame/HEAD/" + file).text
23 response = requests.get(
24 TEST_REPO_DONT_RENDER_URL + "blame/HEAD/" + file
25 ).text
2226 assert "Can't show blame" in response
00 import os
1
12 try:
23 from importlib import reload # Python 3.4+
34 except ImportError:
1920
2021 def test_minimum_env(monkeypatch):
2122 """Test to provide only required env var"""
22 monkeypatch.setattr(os, 'environ', os.environ.copy())
23 monkeypatch.setattr(os, "environ", os.environ.copy())
2324 check_env(
24 {'KLAUS_SITE_NAME': TEST_SITE_NAME},
25 {"KLAUS_SITE_NAME": TEST_SITE_NAME},
2526 ([], TEST_SITE_NAME),
2627 dict(
2728 htdigest_file=None,
2930 require_browser_auth=False,
3031 disable_push=False,
3132 unauthenticated_push=False,
32 ctags_policy='none')
33 ctags_policy="none",
34 ),
3335 )
3436
3537
3638 def test_complete_env(monkeypatch):
3739 """Test to provide all supported env var"""
38 monkeypatch.setattr(os, 'environ', os.environ.copy())
40 monkeypatch.setattr(os, "environ", os.environ.copy())
3941 check_env(
4042 {
41 'KLAUS_REPOS': TEST_REPO,
42 'KLAUS_SITE_NAME': TEST_SITE_NAME,
43 'KLAUS_HTDIGEST_FILE': HTDIGEST_FILE,
44 'KLAUS_USE_SMARTHTTP': 'yes',
45 'KLAUS_REQUIRE_BROWSER_AUTH': '1',
46 'KLAUS_DISABLE_PUSH': 'false',
47 'KLAUS_UNAUTHENTICATED_PUSH': '0',
48 'KLAUS_CTAGS_POLICY': 'ALL'
43 "KLAUS_REPOS": TEST_REPO,
44 "KLAUS_SITE_NAME": TEST_SITE_NAME,
45 "KLAUS_HTDIGEST_FILE": HTDIGEST_FILE,
46 "KLAUS_USE_SMARTHTTP": "yes",
47 "KLAUS_REQUIRE_BROWSER_AUTH": "1",
48 "KLAUS_DISABLE_PUSH": "false",
49 "KLAUS_UNAUTHENTICATED_PUSH": "0",
50 "KLAUS_CTAGS_POLICY": "ALL",
4951 },
5052 ([TEST_REPO], TEST_SITE_NAME),
5153 dict(
5456 require_browser_auth=True,
5557 disable_push=False,
5658 unauthenticated_push=False,
57 ctags_policy='ALL')
59 ctags_policy="ALL",
60 ),
5861 )
5962
6063
6164 def test_unsupported_boolean_env(monkeypatch):
6265 """Test that unsupported boolean env var raises ValueError"""
63 monkeypatch.setattr(os, 'environ', os.environ.copy())
66 monkeypatch.setattr(os, "environ", os.environ.copy())
6467 with pytest.raises(ValueError):
6568 check_env(
6669 {
67 'KLAUS_REPOS': TEST_REPO,
68 'KLAUS_SITE_NAME': TEST_SITE_NAME,
69 'KLAUS_HTDIGEST_FILE': HTDIGEST_FILE,
70 'KLAUS_USE_SMARTHTTP': 'unsupported',
71 }, (), {}
70 "KLAUS_REPOS": TEST_REPO,
71 "KLAUS_SITE_NAME": TEST_SITE_NAME,
72 "KLAUS_HTDIGEST_FILE": HTDIGEST_FILE,
73 "KLAUS_USE_SMARTHTTP": "unsupported",
74 },
75 (),
76 {},
7277 )
7378
7479
7580 def test_wsgi(monkeypatch):
7681 """Test start of wsgi app"""
77 monkeypatch.setattr(os, 'environ', os.environ.copy())
78 os.environ['KLAUS_REPOS'] = TEST_REPO
79 os.environ['KLAUS_SITE_NAME'] = TEST_SITE_NAME
82 monkeypatch.setattr(os, "environ", os.environ.copy())
83 os.environ["KLAUS_REPOS"] = TEST_REPO
84 os.environ["KLAUS_SITE_NAME"] = TEST_SITE_NAME
8085 from klaus.contrib import wsgi
86
8187 with serve_app(wsgi.application):
8288 assert can_reach_unauth()
8389 assert not can_push_auth()
8490
85 os.environ['KLAUS_HTDIGEST_FILE'] = HTDIGEST_FILE
86 os.environ['KLAUS_USE_SMARTHTTP'] = 'yes'
91 os.environ["KLAUS_HTDIGEST_FILE"] = HTDIGEST_FILE
92 os.environ["KLAUS_USE_SMARTHTTP"] = "yes"
8793 reload(wsgi)
8894 with serve_app(wsgi.application):
8995 assert can_reach_unauth()
9298
9399 def test_wsgi_autoreload(monkeypatch):
94100 """Test start of wsgi autoreload app"""
95 monkeypatch.setattr(os, 'environ', os.environ.copy())
96 os.environ['KLAUS_REPOS_ROOT'] = TEST_REPO_ROOT
97 os.environ['KLAUS_SITE_NAME'] = TEST_SITE_NAME
101 monkeypatch.setattr(os, "environ", os.environ.copy())
102 os.environ["KLAUS_REPOS_ROOT"] = TEST_REPO_ROOT
103 os.environ["KLAUS_SITE_NAME"] = TEST_SITE_NAME
98104 from klaus.contrib import wsgi_autoreload, wsgi_autoreloading
105
99106 with serve_app(wsgi_autoreload.application):
100107 assert can_reach_unauth()
101108 assert not can_push_auth()
102109
103 os.environ['KLAUS_HTDIGEST_FILE'] = HTDIGEST_FILE
104 os.environ['KLAUS_USE_SMARTHTTP'] = 'yes'
110 os.environ["KLAUS_HTDIGEST_FILE"] = HTDIGEST_FILE
111 os.environ["KLAUS_USE_SMARTHTTP"] = "yes"
105112 reload(wsgi_autoreload)
106113 reload(wsgi_autoreloading)
107114 with serve_app(wsgi_autoreload.application):
1919
2020 def test_unauthenticated_push_and_require_browser_auth():
2121 with pytest.raises(ValueError):
22 klaus.make_app([], None, use_smarthttp=True, unauthenticated_push=True, require_browser_auth=True)
22 klaus.make_app(
23 [],
24 None,
25 use_smarthttp=True,
26 unauthenticated_push=True,
27 require_browser_auth=True,
28 )
2329
2430
2531 def test_unauthenticated_push_without_use_smarthttp():
3844 for check, permitted in expected_permissions.items():
3945 if check in globals():
4046 checks = [check]
41 elif check.endswith('auth'):
42 checks = ['can_%s' % check]
47 elif check.endswith("auth"):
48 checks = ["can_%s" % check]
4349 else:
44 checks = ['can_%s_unauth' % check, 'can_%s_auth' % check]
50 checks = ["can_%s_unauth" % check, "can_%s_auth" % check]
4551 for check in checks:
4652 assert globals()[check]() == permitted
53
4754 return test
4855
4956
50 test_nosmart_noauth = options_test(
51 {},
52 {'reach': True, 'clone': False, 'push': False}
53 )
57 test_nosmart_noauth = options_test({}, {"reach": True, "clone": False, "push": False})
5458 test_smart_noauth = options_test(
55 {'use_smarthttp': True},
56 {'reach': True, 'clone': True, 'push': False}
59 {"use_smarthttp": True}, {"reach": True, "clone": True, "push": False}
5760 )
5861 test_smart_push = options_test(
59 {'use_smarthttp': True, 'htdigest_file': open(HTDIGEST_FILE)},
60 {'reach': True, 'clone': True, 'push_auth': True, 'push_unauth': False}
62 {"use_smarthttp": True, "htdigest_file": open(HTDIGEST_FILE)},
63 {"reach": True, "clone": True, "push_auth": True, "push_unauth": False},
6164 )
6265 test_unauthenticated_push = options_test(
63 {'use_smarthttp': True, 'unauthenticated_push': True},
64 {'reach': True, 'clone': True, 'push': True}
66 {"use_smarthttp": True, "unauthenticated_push": True},
67 {"reach": True, "clone": True, "push": True},
6568 )
6669 test_nosmart_auth = options_test(
67 {'require_browser_auth': True, 'htdigest_file': open(HTDIGEST_FILE)},
68 {'reach_auth': True, 'reach_unauth': False, 'clone': False, 'push': False}
70 {"require_browser_auth": True, "htdigest_file": open(HTDIGEST_FILE)},
71 {"reach_auth": True, "reach_unauth": False, "clone": False, "push": False},
6972 )
7073 test_smart_auth = options_test(
71 {'require_browser_auth': True, 'use_smarthttp': True, 'htdigest_file': open(HTDIGEST_FILE)},
72 {'reach_auth': True, 'reach_unauth': False, 'clone_auth': True, 'clone_unauth': False, 'push_unauth': False, 'push_auth': True}
74 {
75 "require_browser_auth": True,
76 "use_smarthttp": True,
77 "htdigest_file": open(HTDIGEST_FILE),
78 },
79 {
80 "reach_auth": True,
81 "reach_unauth": False,
82 "clone_auth": True,
83 "clone_unauth": False,
84 "push_unauth": False,
85 "push_auth": True,
86 },
7387 )
7488 test_smart_auth_disable_push = options_test(
75 {'require_browser_auth': True, 'use_smarthttp': True, 'disable_push': True, 'htdigest_file': open(HTDIGEST_FILE)},
76 {'reach_auth': True, 'reach_unauth': False, 'clone_auth': True, 'clone_unauth': False, 'push': False}
89 {
90 "require_browser_auth": True,
91 "use_smarthttp": True,
92 "disable_push": True,
93 "htdigest_file": open(HTDIGEST_FILE),
94 },
95 {
96 "reach_auth": True,
97 "reach_unauth": False,
98 "clone_auth": True,
99 "clone_unauth": False,
100 "push": False,
101 },
77102 )
78103
79104 test_ctags_disabled = options_test(
80 {},
81 {'ctags_tags_and_branches': False, 'ctags_all': False}
105 {}, {"ctags_tags_and_branches": False, "ctags_all": False}
82106 )
83107 test_ctags_tags_and_branches = options_test(
84 {'ctags_policy': 'tags-and-branches'},
85 {'ctags_tags_and_branches': True, 'ctags_all': False}
108 {"ctags_policy": "tags-and-branches"},
109 {"ctags_tags_and_branches": True, "ctags_all": False},
86110 )
87111 test_ctags_all = options_test(
88 {'ctags_policy': 'ALL'},
89 {'ctags_tags_and_branches': True, 'ctags_all': True}
112 {"ctags_policy": "ALL"}, {"ctags_tags_and_branches": True, "ctags_all": True}
90113 )
91114
92115
94117 def can_reach_unauth():
95118 return _check_http200(_GET_unauth, "test_repo")
96119
120
97121 def can_reach_auth():
98122 return _check_http200(_GET_auth, "test_repo")
99123
102126 def can_clone_unauth():
103127 return _can_clone(_GET_unauth, UNAUTH_TEST_REPO_URL)
104128
129
105130 def can_clone_auth():
106131 return _can_clone(_GET_auth, AUTH_TEST_REPO_URL)
132
107133
108134 def _can_clone(http_get, url):
109135 tmp = tempfile.mkdtemp()
110136 try:
111 return any([
112 "git clone" in http_get(TEST_REPO_URL).text,
113 _check_http200(http_get, TEST_REPO_URL + "info/refs?service=git-upload-pack"),
114 subprocess.call(["git", "clone", url, tmp]) == 0,
115 ])
137 return any(
138 [
139 "git clone" in http_get(TEST_REPO_URL).text,
140 _check_http200(
141 http_get, TEST_REPO_URL + "info/refs?service=git-upload-pack"
142 ),
143 subprocess.call(["git", "clone", url, tmp]) == 0,
144 ]
145 )
116146 finally:
117147 shutil.rmtree(tmp, ignore_errors=True)
118148
121151 def can_push_unauth():
122152 return _can_push(_GET_unauth, UNAUTH_TEST_REPO_URL)
123153
154
124155 def can_push_auth():
125156 return _can_push(_GET_auth, AUTH_TEST_REPO_URL)
126157
158
127159 def _can_push(http_get, url):
128 return any([
129 _check_http200(http_get, TEST_REPO_URL + "info/refs?service=git-receive-pack"),
130 _check_http200(http_get, TEST_REPO_URL + "git-receive-pack"),
131 subprocess.call(["git", "push", url, "master"], cwd=TEST_REPO) == 0,
132 ])
160 return any(
161 [
162 _check_http200(
163 http_get, TEST_REPO_URL + "info/refs?service=git-receive-pack"
164 ),
165 _check_http200(http_get, TEST_REPO_URL + "git-receive-pack"),
166 subprocess.call(["git", "push", url, "master"], cwd=TEST_REPO) == 0,
167 ]
168 )
133169
134170
135171 # Ctags
136172 def ctags_tags_and_branches():
137173 return all(
138174 _ctags_enabled(ref, f)
139 for ref in ["master", "tag1"] for f in ["test.c", "test.js"]
175 for ref in ["master", "tag1"]
176 for f in ["test.c", "test.js"]
140177 )
141178
142179
143180 def ctags_all():
144 all_refs = re.findall('href=".+/commit/([a-z0-9]{40})/">',
145 requests.get(UNAUTH_TEST_REPO_URL).text)
181 all_refs = re.findall(
182 'href=".+/commit/([a-z0-9]{40})/">', requests.get(UNAUTH_TEST_REPO_URL).text
183 )
146184 assert len(all_refs) == 3
147185 return all(
148 _ctags_enabled(ref, f)
149 for ref in all_refs for f in ["test.c", "test.js"]
150 )
186 _ctags_enabled(ref, f) for ref in all_refs for f in ["test.c", "test.js"]
187 )
188
151189
152190 def _ctags_enabled(ref, filename):
153191 response = requests.get(UNAUTH_TEST_REPO_URL + "blob/%s/%s" % (ref, filename))
157195
158196
159197 def _GET_unauth(url=""):
160 return requests.get(UNAUTH_TEST_SERVER + url, auth=requests.auth.HTTPDigestAuth("invalid", "password"))
198 return requests.get(
199 UNAUTH_TEST_SERVER + url,
200 auth=requests.auth.HTTPDigestAuth("invalid", "password"),
201 )
202
161203
162204 def _GET_auth(url=""):
163 return requests.get(AUTH_TEST_SERVER + url, auth=requests.auth.HTTPDigestAuth("testuser", "testpassword"))
205 return requests.get(
206 AUTH_TEST_SERVER + url,
207 auth=requests.auth.HTTPDigestAuth("testuser", "testpassword"),
208 )
209
164210
165211 def _check_http200(http_get, url):
166212 try:
11 import re
22 import subprocess
33 import klaus_cli
4
45 try:
56 from unittest import mock
67 except ImportError:
1213 manpage = force_unicode(subprocess.check_output(["man", "./klaus.1"]))
1314
1415 def assert_in_manpage(s):
15 clean = lambda x: re.sub('(.\\x08)|\\s', '', x)
16 clean = lambda x: re.sub("(.\\x08)|\\s", "", x)
1617 assert clean(s) in clean(manpage), "%r not found in manpage" % s
1718
1819 mock_parser = mock.Mock()
19 with mock.patch('argparse.ArgumentParser') as mock_cls:
20 with mock.patch("argparse.ArgumentParser") as mock_cls:
2021 mock_cls.return_value = mock_parser
2122 klaus_cli.make_parser()
2223
2324 for args, kwargs in mock_parser.add_argument.call_args_list:
24 if kwargs.get('metavar') == 'DIR':
25 if kwargs.get("metavar") == "DIR":
2526 continue
2627 for string in args:
2728 assert_in_manpage(string)
28 if 'help' in kwargs:
29 assert_in_manpage(kwargs['help'])
30 if 'choices' in kwargs:
31 for choice in kwargs['choices']:
29 if "help" in kwargs:
30 assert_in_manpage(kwargs["help"])
31 if "choices" in kwargs:
32 for choice in kwargs["choices"]:
3233 assert_in_manpage(choice)
00 import unittest
1
12 try:
23 from unittest import mock
34 except ImportError:
78
89
910 class ForceUnicodeTests(unittest.TestCase):
10
1111 def test_ascii(self):
12 self.assertEqual(u'foo', utils.force_unicode(b'foo'))
12 self.assertEqual(u"foo", utils.force_unicode(b"foo"))
1313
1414 def test_utf8(self):
15 self.assertEqual(u'f\xce', utils.force_unicode(b'f\xc3\x8e'))
15 self.assertEqual(u"f\xce", utils.force_unicode(b"f\xc3\x8e"))
1616
1717 def test_invalid(self):
18 with mock.patch.object(utils, 'chardet', None):
19 self.assertRaises(
20 UnicodeDecodeError, utils.force_unicode, b'f\xce')
18 with mock.patch.object(utils, "chardet", None):
19 self.assertRaises(UnicodeDecodeError, utils.force_unicode, b"f\xce")
2120
2221
2322 class TarballBasenameTests(unittest.TestCase):
24
2523 def test_examples(self):
2624 examples = [
27 ('v0.1', 'klaus-0.1'),
28 ('klaus-0.1', 'klaus-0.1'),
29 ('0.1', 'klaus-0.1'),
30 ('b3e70e08344ca3f83cc7033ecdbefa90443d7d2e',
31 'klaus@b3e70e08344ca3f83cc7033ecdbefa90443d7d2e'),
32 ('vanilla', 'klaus-vanilla'),
33 ]
25 ("v0.1", "klaus-0.1"),
26 ("klaus-0.1", "klaus-0.1"),
27 ("0.1", "klaus-0.1"),
28 (
29 "b3e70e08344ca3f83cc7033ecdbefa90443d7d2e",
30 "klaus@b3e70e08344ca3f83cc7033ecdbefa90443d7d2e",
31 ),
32 ("vanilla", "klaus-vanilla"),
33 ]
3434 for (rev, basename) in examples:
35 self.assertEqual(utils.tarball_basename('klaus', rev), basename)
35 self.assertEqual(utils.tarball_basename("klaus", rev), basename)
1010 response_body = BytesIO(response.raw.read())
1111 tarball = tarfile.TarFile.gzopen("test.tar.gz", fileobj=response_body)
1212 with contextlib.closing(tarball):
13 assert tarball.extractfile('test_repo@master/test.c').read() == b'int a;\n'
13 assert tarball.extractfile("test_repo@master/test.c").read() == b"int a;\n"
1414
1515
1616 def test_no_newline_at_end_of_file():
4343 assert "blob/HEAD/test.txt" not in response
4444 assert "blob/HEAD/folder/test.txt" in response
4545
46
4647 def test_display_invalid_repos():
4748 with serve():
4849 response = requests.get(UNAUTH_TEST_SERVER).text
4950 assert '<ul class="repolist invalid">' in response
50 assert '<div class=name>invalid_repo</div>' in response
51 assert "<div class=name>invalid_repo</div>" in response
2424
2525 TEST_INVALID_REPO = os.path.abspath("tests/repos/build/invalid_repo")
2626
27 ALL_TEST_REPOS = [TEST_REPO, TEST_REPO_NO_NEWLINE, TEST_REPO_DONT_RENDER, TEST_INVALID_REPO]
27 ALL_TEST_REPOS = [
28 TEST_REPO,
29 TEST_REPO_NO_NEWLINE,
30 TEST_REPO_DONT_RENDER,
31 TEST_INVALID_REPO,
32 ]
2833
2934
3035 @contextlib.contextmanager
4449 yield
4550 finally:
4651 server.server_close()
47 if 'TRAVIS' in os.environ:
52 if "TRAVIS" in os.environ:
4853 # This fixes some "Address already in use" cases on Travis.
4954 time.sleep(1)
5055
5156
5257 def serve_require_auth(*args, **kwargs):
53 kwargs['htdigest_file'] = open(HTDIGEST_FILE)
54 kwargs['require_browser_auth'] = True
58 kwargs["htdigest_file"] = open(HTDIGEST_FILE)
59 kwargs["require_browser_auth"] = True
5560 return testserver(*args, **kwargs)
55 from collections import defaultdict
66 import atexit
77
8
89 def view_from_url(url):
910 try:
10 return url.split('/')[2]
11 return url.split("/")[2]
1112 except IndexError:
1213 return url
14
1315
1416 AHREF_RE = re.compile('href="([\w/][^"]+)"')
1517
1719 errors = defaultdict(set)
1820 durations = defaultdict(list)
1921
22
2023 def main():
21 urls = {'/'}
24 urls = {"/"}
2225 while urls:
2326 try:
2427 http_conn.close()
2528 except NameError:
2629 pass
27 http_conn = httplib.HTTPConnection('localhost', 8080)
30 http_conn = httplib.HTTPConnection("localhost", 8080)
2831 url = urls.pop()
2932 if url in seen:
3033 continue
3134 seen.add(url)
32 if url.startswith('http'):
35 if url.startswith("http"):
3336 continue
34 if '-v' in sys.argv:
35 print 'Requesting %r...' % url
37 if "-v" in sys.argv:
38 print "Requesting %r..." % url
3639 start = time.time()
37 http_conn.request('GET', url)
40 http_conn.request("GET", url)
3841 response = http_conn.getresponse()
3942 durations[view_from_url(url)].append(time.time() - start)
4043 status = str(response.status)
41 if status[0] == '3':
42 urls.add(response.getheader('Location'))
43 elif status[0] == '2':
44 if not '/raw/' in url:
44 if status[0] == "3":
45 urls.add(response.getheader("Location"))
46 elif status[0] == "2":
47 if not "/raw/" in url:
4548 html = response.read()
46 html = re.sub('<pre>.*?</pre>', '', html)
49 html = re.sub("<pre>.*?</pre>", "", html)
4750 urls.update(AHREF_RE.findall(html))
4851 else:
49 if '--failfast' in sys.argv:
52 if "--failfast" in sys.argv:
5053 print url, status
5154 exit(1)
5255 errors[status].add(url)
5356
57
5458 def print_stats():
5559 import pprint
56 print len(seen)
60
61 print (len(seen))
5762 pprint.pprint(dict(errors))
58 print {url: sum(times)/len(times) for url, times in durations.iteritems()}
63 print ({url: sum(times) / len(times) for url, times in durations.iteritems()})
64
65
5966 atexit.register(print_stats)
6067
6168 main()