Black format all the things
Jonas Haag
4 months ago
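Every change below is mechanical output of the Black code formatter: string literals are normalized to double quotes, calls and literals that exceed the line limit are exploded one element per line with trailing commas, and backslash continuations become parenthesized blocks. The hand-formatted URL table in setup_routes is fenced with `# fmt: off` / `# fmt: on` so Black leaves it alone. As a rough sketch of the transformation (the exact invocation is not recorded in this commit; Black's default 88-column line length is assumed), the same rewrite can be reproduced per snippet with Black's Python API:

    import black

    src = "parser.add_argument('--host', help=\"default: 127.0.0.1\", default='127.0.0.1')\n"
    # format_str() applies the same normalization shown throughout the diff below
    print(black.format_str(src, mode=black.FileMode()))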
@@ -16 +16 @@
 def git_repository(path):
     path = os.path.abspath(path)
     if not os.path.exists(path):
-        raise argparse.ArgumentTypeError('%r: No such directory' % path)
+        raise argparse.ArgumentTypeError("%r: No such directory" % path)
     try:
         Repo(path)
     except NotGitRepository:
-        raise argparse.ArgumentTypeError('%r: Not a Git repository' % path)
+        raise argparse.ArgumentTypeError("%r: Not a Git repository" % path)
     return path


 def make_parser():
     parser = argparse.ArgumentParser()
-    parser.add_argument('--host', help="default: 127.0.0.1", default='127.0.0.1')
-    parser.add_argument('--port', help="default: 8080", default=8080, type=int)
-    parser.add_argument('--site-name', help="site name showed in header. default: your hostname")
-    parser.add_argument('--version', help='print version number', action='store_true')
-    parser.add_argument('-b', '--browser', help="open klaus in a browser on server start",
-                        default=False, action='store_true')
-    parser.add_argument('-B', '--with-browser', help="specify which browser to use with --browser",
-                        metavar='BROWSER', default=None)
-    parser.add_argument('--ctags', help="enable ctags for which revisions? default: none. "
-                                        "WARNING: Don't use 'ALL' for public servers!",
-                        choices=['none', 'tags-and-branches', 'ALL'], default='none')
+    parser.add_argument("--host", help="default: 127.0.0.1", default="127.0.0.1")
+    parser.add_argument("--port", help="default: 8080", default=8080, type=int)
+    parser.add_argument(
+        "--site-name", help="site name showed in header. default: your hostname"
+    )
+    parser.add_argument("--version", help="print version number", action="store_true")
+    parser.add_argument(
+        "-b",
+        "--browser",
+        help="open klaus in a browser on server start",
+        default=False,
+        action="store_true",
+    )
+    parser.add_argument(
+        "-B",
+        "--with-browser",
+        help="specify which browser to use with --browser",
+        metavar="BROWSER",
+        default=None,
+    )
+    parser.add_argument(
+        "--ctags",
+        help="enable ctags for which revisions? default: none. "
+        "WARNING: Don't use 'ALL' for public servers!",
+        choices=["none", "tags-and-branches", "ALL"],
+        default="none",
+    )

-    parser.add_argument('repos', help='repositories to serve',
-                        metavar='DIR', nargs='*', type=git_repository)
+    parser.add_argument(
+        "repos",
+        help="repositories to serve",
+        metavar="DIR",
+        nargs="*",
+        type=git_repository,
+    )

     grp = parser.add_argument_group("Git Smart HTTP")
-    grp.add_argument('--smarthttp', help="enable Git Smart HTTP serving",
-                     action='store_true')
-    grp.add_argument('--htdigest', help="use credentials from FILE",
-                     metavar="FILE", type=argparse.FileType('r'))
+    grp.add_argument(
+        "--smarthttp", help="enable Git Smart HTTP serving", action="store_true"
+    )
+    grp.add_argument(
+        "--htdigest",
+        help="use credentials from FILE",
+        metavar="FILE",
+        type=argparse.FileType("r"),
+    )

     grp = parser.add_argument_group("Development flags", "DO NOT USE IN PRODUCTION!")
-    grp.add_argument('--debug', help="Enable Werkzeug debugger and reloader", action='store_true')
+    grp.add_argument(
+        "--debug", help="Enable Werkzeug debugger and reloader", action="store_true"
+    )

     return parser

@@ -61 +89 @@
         return 0

     if args.htdigest and not args.smarthttp:
-        print("ERROR: --htdigest option has no effect without --smarthttp enabled", file=sys.stderr)
+        print(
+            "ERROR: --htdigest option has no effect without --smarthttp enabled",
+            file=sys.stderr,
+        )
         return 1

     if not args.repos:
-        print("WARNING: No repositories supplied -- syntax is 'klaus dir1 dir2...'.", file=sys.stderr)
+        print(
+            "WARNING: No repositories supplied -- syntax is 'klaus dir1 dir2...'.",
+            file=sys.stderr,
+        )

     if not args.site_name:
-        args.site_name = '%s:%d' % (args.host, args.port)
+        args.site_name = "%s:%d" % (args.host, args.port)

-    if args.ctags != 'none':
+    if args.ctags != "none":
         from klaus.ctagsutils import check_have_exuberant_ctags
+
         if not check_have_exuberant_ctags():
-            print("ERROR: Exuberant ctags not installed (or 'ctags' binary isn't *Exuberant* ctags)", file=sys.stderr)
+            print(
+                "ERROR: Exuberant ctags not installed (or 'ctags' binary isn't *Exuberant* ctags)",
+                file=sys.stderr,
+            )
             return 1
         try:
             import ctags
@@ -93 +131 @@

     app.run(args.host, args.port, args.debug)

+
 def _open_browser(args):
     # Open a web browser onto the server URL. Technically we're jumping the
     # gun a little here since the server is not yet running, but there's no
@@ -104 +143 @@
         opener = webbrowser.open
     else:
         opener = webbrowser.get(args.with_browser).open
-    opener('http://%s:%s' % (args.host, args.port))
+    opener("http://%s:%s" % (args.host, args.port))


-if __name__ == '__main__':
+if __name__ == "__main__":
     exit(main())
@@ -6 +6 @@
 from klaus.repo import FancyRepo, InvalidRepo


-KLAUS_VERSION = utils.guess_git_revision() or '1.5.2'
+KLAUS_VERSION = utils.guess_git_revision() or "1.5.2"


 class Klaus(flask.Flask):
     jinja_options = {
-        'extensions': ['jinja2.ext.autoescape'],
-        'undefined': jinja2.StrictUndefined
+        "extensions": ["jinja2.ext.autoescape"],
+        "undefined": jinja2.StrictUndefined,
     }

-    def __init__(self, repo_paths, site_name, use_smarthttp, ctags_policy='none'):
+    def __init__(self, repo_paths, site_name, use_smarthttp, ctags_policy="none"):
         """(See `make_app` for parameter descriptions.)"""
         self.site_name = site_name
         self.use_smarthttp = use_smarthttp
@@ -33 +33 @@
         """Called by Flask.__init__"""
         env = super(Klaus, self).create_jinja_environment()
         for func in [
-            'force_unicode',
-            'timesince',
-            'shorten_sha1',
-            'shorten_message',
-            'extract_author_name',
-            'formattimestamp',
+            "force_unicode",
+            "timesince",
+            "shorten_sha1",
+            "shorten_message",
+            "extract_author_name",
+            "formattimestamp",
         ]:
             env.filters[func] = getattr(utils, func)

-        env.globals['KLAUS_VERSION'] = KLAUS_VERSION
-        env.globals['USE_SMARTHTTP'] = self.use_smarthttp
-        env.globals['SITE_NAME'] = self.site_name
+        env.globals["KLAUS_VERSION"] = KLAUS_VERSION
+        env.globals["USE_SMARTHTTP"] = self.use_smarthttp
+        env.globals["SITE_NAME"] = self.site_name

         return env

     def setup_routes(self):
+        # fmt: off
         for endpoint, rule in [
             ('repo_list', '/'),
             ('robots_txt', '/robots.txt/'),
@@ -70 +71 @@
             ('download', '/<repo>/tarball/<path:rev>/'),
         ]:
             self.add_url_rule(rule, view_func=getattr(views, endpoint))
+        # fmt: on

     def should_use_ctags(self, git_repo, git_commit):
-        if self.ctags_policy == 'none':
+        if self.ctags_policy == "none":
             return False
-        elif self.ctags_policy == 'ALL':
+        elif self.ctags_policy == "ALL":
             return True
-        elif self.ctags_policy == 'tags-and-branches':
+        elif self.ctags_policy == "tags-and-branches":
             return git_commit.id in git_repo.get_tag_and_branch_shas()
         else:
             raise ValueError("Unknown ctags policy %r" % self.ctags_policy)
@@ -92 +94 @@
     return valid_repos, invalid_repos


-def make_app(repo_paths, site_name, use_smarthttp=False, htdigest_file=None,
-             require_browser_auth=False, disable_push=False, unauthenticated_push=False,
-             ctags_policy='none'):
+def make_app(
+    repo_paths,
+    site_name,
+    use_smarthttp=False,
+    htdigest_file=None,
+    require_browser_auth=False,
+    disable_push=False,
+    unauthenticated_push=False,
+    ctags_policy="none",
+):
     """
     Returns a WSGI app with all the features (smarthttp, authentication)
     already patched in.
@@ -124 +133 @@
         if disable_push:
             raise ValueError("'unauthenticated_push' set with 'disable_push'")
         if require_browser_auth:
-            raise ValueError("Incompatible options 'unauthenticated_push' and 'require_browser_auth'")
+            raise ValueError(
+                "Incompatible options 'unauthenticated_push' and 'require_browser_auth'"
+            )
     if htdigest_file and not (require_browser_auth or use_smarthttp):
-        raise ValueError("'htdigest_file' set without 'use_smarthttp' or 'require_browser_auth'")
+        raise ValueError(
+            "'htdigest_file' set without 'use_smarthttp' or 'require_browser_auth'"
+        )

     app = Klaus(
         repo_paths,
@@ -139 +152 @@
     if use_smarthttp:
         # `path -> Repo` mapping for Dulwich's web support
         dulwich_backend = dulwich.server.DictBackend(
-            {'/'+name: repo for name, repo in app.valid_repos.items()}
+            {"/" + name: repo for name, repo in app.valid_repos.items()}
         )
         # Dulwich takes care of all Git related requests/URLs
         # and passes through everything else to klaus
@@ -163 +176 @@
         # Git will never call /<repo-name>/git-receive-pack if authentication
         # failed for /info/refs, but since it's used to upload stuff to the server
         # we must secure it anyway for security reasons.
-        PATTERN = r'^/[^/]+/(info/refs\?service=git-receive-pack|git-receive-pack)$'
+        PATTERN = r"^/[^/]+/(info/refs\?service=git-receive-pack|git-receive-pack)$"
         if unauthenticated_push:
             # DANGER ZONE: Don't require authentication for push'ing
             app.wsgi_app = dulwich_wrapped_app
@@ -192 +205 @@

     if require_browser_auth:
         app.wsgi_app = httpauth.DigestFileHttpAuthMiddleware(
-            htdigest_file,
-            wsgi_app=app.wsgi_app
+            htdigest_file, wsgi_app=app.wsgi_app
         )

     return app
@@ -2 +2 @@


 def get_args_from_env():
-    repos = os.environ.get('KLAUS_REPOS', [])
+    repos = os.environ.get("KLAUS_REPOS", [])
     if repos:
         repos = repos.split()
-    args = (
-        repos,
-        os.environ.get('KLAUS_SITE_NAME', 'unnamed site')
-    )
+    args = (repos, os.environ.get("KLAUS_SITE_NAME", "unnamed site"))
     kwargs = dict(
-        htdigest_file=os.environ.get('KLAUS_HTDIGEST_FILE'),
-        use_smarthttp=strtobool(os.environ.get('KLAUS_USE_SMARTHTTP', '0')),
+        htdigest_file=os.environ.get("KLAUS_HTDIGEST_FILE"),
+        use_smarthttp=strtobool(os.environ.get("KLAUS_USE_SMARTHTTP", "0")),
         require_browser_auth=strtobool(
-            os.environ.get('KLAUS_REQUIRE_BROWSER_AUTH', '0')),
-        disable_push=strtobool(os.environ.get('KLAUS_DISABLE_PUSH', '0')),
+            os.environ.get("KLAUS_REQUIRE_BROWSER_AUTH", "0")
+        ),
+        disable_push=strtobool(os.environ.get("KLAUS_DISABLE_PUSH", "0")),
         unauthenticated_push=strtobool(
-            os.environ.get('KLAUS_UNAUTHENTICATED_PUSH', '0')),
-        ctags_policy=os.environ.get('KLAUS_CTAGS_POLICY', 'none')
+            os.environ.get("KLAUS_UNAUTHENTICATED_PUSH", "0")
+        ),
+        ctags_policy=os.environ.get("KLAUS_CTAGS_POLICY", "none"),
     )
     return args, kwargs
@@ -2 +2 @@

 args, kwargs = get_args_from_env()

-if kwargs['htdigest_file']:
-    with open(kwargs['htdigest_file']) as file:
-        kwargs['htdigest_file'] = file
+if kwargs["htdigest_file"]:
+    with open(kwargs["htdigest_file"]) as file:
+        kwargs["htdigest_file"] = file
         application = make_app(*args, **kwargs)
 else:
     application = make_app(*args, **kwargs)
@@ -5 +5 @@
 from .wsgi_autoreloading import make_autoreloading_app


-if 'KLAUS_REPOS' in os.environ:
-    warnings.warn("use KLAUS_REPOS_ROOT instead of KLAUS_REPOS for the autoreloader apps", DeprecationWarning)
+if "KLAUS_REPOS" in os.environ:
+    warnings.warn(
+        "use KLAUS_REPOS_ROOT instead of KLAUS_REPOS for the autoreloader apps",
+        DeprecationWarning,
+    )

 args, kwargs = get_args_from_env()
-repos_root = os.environ.get('KLAUS_REPOS_ROOT') or os.environ['KLAUS_REPOS']
+repos_root = os.environ.get("KLAUS_REPOS_ROOT") or os.environ["KLAUS_REPOS"]
 args = (repos_root,) + args[1:]

-if kwargs['htdigest_file']:
+if kwargs["htdigest_file"]:
     # Cache the contents of the htdigest file, the application will not read
     # the file like object until later when called.
-    with io.open(kwargs['htdigest_file'], encoding='utf-8') as htdigest_file:
-        kwargs['htdigest_file'] = io.StringIO(htdigest_file.read())
+    with io.open(kwargs["htdigest_file"], encoding="utf-8") as htdigest_file:
+        kwargs["htdigest_file"] = io.StringIO(htdigest_file.read())

 application = make_autoreloading_app(*args, **kwargs)
@@ -17 +17 @@
     Polls `dir` for changes every `interval` seconds and sets `should_reload`
     accordingly.
     """
-    glob_pattern = dir + '/*'
+    glob_pattern = dir + "/*"
     old_contents = glob.glob(glob_pattern)
     while 1:
         time.sleep(interval)
@@ -36 +36 @@
         if S.should_reload:
             # Refresh inner application with new repo list
             print("Reloading repository list...")
-            S.inner_app = make_app(glob.glob(repos_root + '/*'), *args, **kwargs)
+            S.inner_app = make_app(glob.glob(repos_root + "/*"), *args, **kwargs)
             S.should_reload = False
         return S.inner_app(environ, start_response)

@@ -46 +46 @@
     poller_thread.start()

     return app
-
@@ -45 +45 @@
     :return: path to the compressed version of the tagsfile
     """
     _, compressed_tagsfile_path = tempfile.mkstemp()
-    with open(uncompressed_tagsfile_path, 'rb') as uncompressed:
-        with gzip.open(compressed_tagsfile_path, 'wb', COMPRESSION_LEVEL) as compressed:
+    with open(uncompressed_tagsfile_path, "rb") as uncompressed:
+        with gzip.open(compressed_tagsfile_path, "wb", COMPRESSION_LEVEL) as compressed:
             shutil.copyfileobj(uncompressed, compressed)
     return compressed_tagsfile_path

@@ -57 +57 @@
     :return: path to the uncompressed version of the tagsfile
     """
     _, uncompressed_tagsfile_path = tempfile.mkstemp()
-    with gzip.open(compressed_tagsfile_path, 'rb') as compressed:
-        with open(uncompressed_tagsfile_path, 'wb') as uncompressed:
+    with gzip.open(compressed_tagsfile_path, "rb") as compressed:
+        with open(uncompressed_tagsfile_path, "wb") as uncompressed:
             shutil.copyfileobj(compressed, uncompressed)
     return uncompressed_tagsfile_path


 MiB = 1024 * 1024
+

 class CTagsCache(object):
     """A ctags cache. Both uncompressed and compressed entries are kept in
@@ -83 +84 @@
     - When the tagsfile is requested and it's in the compressed cache sector,
       it is moved back to the uncompressed sector prior to using it.
     """
-    def __init__(self, uncompressed_max_bytes=30*MiB, compressed_max_bytes=20*MiB):
+
+    def __init__(self, uncompressed_max_bytes=30 * MiB, compressed_max_bytes=20 * MiB):
         self.uncompressed_max_bytes = uncompressed_max_bytes
         self.compressed_max_bytes = compressed_max_bytes
         # Note: We use dulwich's LRU cache to store the tagsfile paths here,
         # but we could easily replace it by any other (LRU) cache implementation.
-        self._uncompressed_cache = LRUSizeCache(uncompressed_max_bytes, compute_size=os.path.getsize)
-        self._compressed_cache = LRUSizeCache(compressed_max_bytes, compute_size=os.path.getsize)
+        self._uncompressed_cache = LRUSizeCache(
+            uncompressed_max_bytes, compute_size=os.path.getsize
+        )
+        self._compressed_cache = LRUSizeCache(
+            compressed_max_bytes, compute_size=os.path.getsize
+        )
        self._clearing = False
        self._lock = threading.Lock()

@@ -127 +133 @@

            if git_rev in self._compressed_cache:
                compressed_tagsfile_path = self._compressed_cache[git_rev]
-                uncompressed_tagsfile_path = uncompress_tagsfile(compressed_tagsfile_path)
-                self._compressed_cache._remove_node(self._compressed_cache._cache[git_rev])
+                uncompressed_tagsfile_path = uncompress_tagsfile(
+                    compressed_tagsfile_path
+                )
+                self._compressed_cache._remove_node(
+                    self._compressed_cache._cache[git_rev]
+                )
            else:
                # Not in cache.
                uncompressed_tagsfile_path = create_tagsfile(git_repo_path, git_rev)
-                self._uncompressed_cache.add(git_rev, uncompressed_tagsfile_path,
-                                             self._clear_uncompressed_entry)
+                self._uncompressed_cache.add(
+                    git_rev, uncompressed_tagsfile_path, self._clear_uncompressed_entry
+                )
            return uncompressed_tagsfile_path

    def _clear_uncompressed_entry(self, git_rev, uncompressed_tagsfile_path):
@@ -148 +159 @@
        if not self._clearing:
            # If we're clearing the whole cache, don't waste time moving tagsfiles
            # from uncompressed to compressed cache, but remove them directly instead.
-            self._compressed_cache.add(git_rev, compress_tagsfile(uncompressed_tagsfile_path),
-                                       self._clear_compressed_entry)
+            self._compressed_cache.add(
+                git_rev,
+                compress_tagsfile(uncompressed_tagsfile_path),
+                self._clear_compressed_entry,
+            )
            delete_tagsfile(uncompressed_tagsfile_path)

    def _clear_compressed_entry(self, git_rev, compressed_tagsfile_path):
@@ -7 +7 @@
 def check_have_exuberant_ctags():
     """Check that the 'ctags' binary is *Exuberant* ctags (not etags etc)"""
     try:
-        return b"Exuberant" in subprocess.check_output(["ctags", "--version"], stderr=subprocess.PIPE)
+        return b"Exuberant" in subprocess.check_output(
+            ["ctags", "--version"], stderr=subprocess.PIPE
+        )
     except subprocess.CalledProcessError:
         return False

@@ -20 +22 @@

     :return: path to the generated tagsfile
     """
-    assert check_have_exuberant_ctags(), "'ctags' binary is missing or not *Exuberant* ctags"
+    assert (
+        check_have_exuberant_ctags()
+    ), "'ctags' binary is missing or not *Exuberant* ctags"

     _, target_tagsfile = tempfile.mkstemp()
     checkout_tmpdir = tempfile.mkdtemp()
     try:
-        subprocess.check_call(["git", "clone", "-q", "--shared", git_repo_path, checkout_tmpdir])
+        subprocess.check_call(
+            ["git", "clone", "-q", "--shared", git_repo_path, checkout_tmpdir]
+        )
         subprocess.check_call(["git", "checkout", "-q", git_rev], cwd=checkout_tmpdir)
-        subprocess.check_call(["ctags", "--fields=+l", "-Rno", target_tagsfile], cwd=checkout_tmpdir)
+        subprocess.check_call(
+            ["ctags", "--fields=+l", "-Rno", target_tagsfile], cwd=checkout_tmpdir
+        )
     finally:
         shutil.rmtree(checkout_tmpdir)
     return target_tagsfile
@@ -24 +24 @@
         end -= 1
     end += 1
     if start or end:
+
         def do(l, tag):
             last = end + len(l)
-            return b''.join(
-                [l[:start], b'<', tag, b'>', l[start:last], b'</', tag, b'>',
-                 l[last:]])
-        old_line = do(old_line, b'del')
-        new_line = do(new_line, b'ins')
+            return b"".join(
+                [l[:start], b"<", tag, b">", l[start:last], b"</", tag, b">", l[last:]]
+            )
+
+        old_line = do(old_line, b"del")
+        new_line = do(new_line, b"ins")
     return old_line, new_line


@@ -39 +41 @@
     actions = []
     chunks = []
     for group in SequenceMatcher(None, a, b).get_grouped_opcodes(n):
-        old_line, old_end, new_line, new_end = group[0][1], group[-1][2], group[0][3], group[-1][4]
+        old_line, old_end, new_line, new_end = (
+            group[0][1],
+            group[-1][2],
+            group[0][3],
+            group[-1][4],
+        )
         lines = []
+
         def add_line(old_lineno, new_lineno, action, line):
             actions.append(action)
-            lines.append({
-                'old_lineno': old_lineno,
-                'new_lineno': new_lineno,
-                'action': action,
-                'line': line,
-                'no_newline': not line.endswith(b'\n')
-            })
+            lines.append(
+                {
+                    "old_lineno": old_lineno,
+                    "new_lineno": new_lineno,
+                    "action": action,
+                    "line": line,
+                    "no_newline": not line.endswith(b"\n"),
+                }
+            )
+
         chunks.append(lines)
         for tag, i1, i2, j1, j2 in group:
-            if tag == 'equal':
+            if tag == "equal":
                 for c, line in enumerate(a[i1:i2]):
-                    add_line(i1+c, j1+c, 'unmod', e(line))
-            elif tag == 'insert':
+                    add_line(i1 + c, j1 + c, "unmod", e(line))
+            elif tag == "insert":
                 for c, line in enumerate(b[j1:j2]):
-                    add_line(None, j1+c, 'add', e(line))
-            elif tag == 'delete':
+                    add_line(None, j1 + c, "add", e(line))
+            elif tag == "delete":
                 for c, line in enumerate(a[i1:i2]):
-                    add_line(i1+c, None, 'del', e(line))
-            elif tag == 'replace':
+                    add_line(i1 + c, None, "del", e(line))
+            elif tag == "replace":
                 for c, line in enumerate(a[i1:i2]):
-                    add_line(i1+c, None, 'del', e(line))
+                    add_line(i1 + c, None, "del", e(line))
                 for c, line in enumerate(b[j1:j2]):
-                    add_line(None, j1+c, 'add', e(line))
+                    add_line(None, j1 + c, "add", e(line))
             else:
-                raise AssertionError('unknown tag %s' % tag)
+                raise AssertionError("unknown tag %s" % tag)

-    return actions.count('add'), actions.count('del'), chunks
+    return actions.count("add"), actions.count("del"), chunks
@@ -0 +0 @@
 from six.moves import filter

 from pygments import highlight
-from pygments.lexers import get_lexer_by_name, get_lexer_for_filename, \
-    guess_lexer, ClassNotFound, TextLexer
+from pygments.lexers import (
+    get_lexer_by_name,
+    get_lexer_for_filename,
+    guess_lexer,
+    ClassNotFound,
+    TextLexer,
+)
 from pygments.formatters import HtmlFormatter

 from klaus import markup
@@ -13 +18 @@
     "REXX Ruby SML SQL Scheme Sh Tcl Tex VHDL Verilog Vim"
     # Not supported by Pygments: Asp Ant BETA Flex SLang Vera YACC
 ).split()
-PYGMENTS_CTAGS_LANGUAGE_MAP = dict((get_lexer_by_name(l).name, l) for l in CTAGS_SUPPORTED_LANGUAGES)
+PYGMENTS_CTAGS_LANGUAGE_MAP = dict(
+    (get_lexer_by_name(l).name, l) for l in CTAGS_SUPPORTED_LANGUAGES
+)


 class KlausDefaultFormatter(HtmlFormatter):
     def __init__(self, language, ctags, **kwargs):
-        HtmlFormatter.__init__(self, linenos='table', lineanchors='L',
-                               linespans='L', anchorlinenos=True, **kwargs)
+        HtmlFormatter.__init__(
+            self,
+            linenos="table",
+            lineanchors="L",
+            linespans="L",
+            anchorlinenos=True,
+            **kwargs
+        )
         self.language = language
         if ctags:
             # Use Pygments' ctags system but provide our own CTags instance
@@ -30 +43 @@
         for tag, line in HtmlFormatter._format_lines(self, tokensource):
             if tag == 1:
                 # sourcecode line
-                line = '<span class=line>%s</span>' % line
+                line = "<span class=line>%s</span>" % line
             yield tag, line

     def _lookup_ctag(self, token):
@@ -39 +52 @@
         if not best_matches:
             return None, None
         else:
-            return (best_matches[0]['file'].decode("utf-8"),
-                    best_matches[0]['lineNumber'])
+            return (
+                best_matches[0]["file"].decode("utf-8"),
+                best_matches[0]["lineNumber"],
+            )

     def _get_all_ctags_matches(self, token):
-        FIELDS = ('file', 'lineNumber', 'kind', b'language')
+        FIELDS = ("file", "lineNumber", "kind", b"language")
         from ctags import TagEntry
+
         entry = TagEntry()  # target "buffer" for ctags
         if self._ctags.find(entry, token.encode("utf-8"), 0):
             yield dict((k, entry[k]) for k in FIELDS)
@@ -55 +71 @@
         if self.language is None:
             return matches
         else:
-            return filter(lambda match: match[b'language'] == self.language.encode("utf-8"), matches)
+            return filter(
+                lambda match: match[b"language"] == self.language.encode("utf-8"),
+                matches,
+            )


 class KlausPythonFormatter(KlausDefaultFormatter):
@@ -68 +87 @@
         # import of the tag in some other file. We change the tag lookup mechanics
         # so that non-import matches are always preferred over import matches.
         return filter(
-            lambda match: match['kind'] != b'i',
-            super(KlausPythonFormatter, self).get_best_ctags_matches(matches)
+            lambda match: match["kind"] != b"i",
+            super(KlausPythonFormatter, self).get_best_ctags_matches(matches),
         )


-def highlight_or_render(code, filename, render_markup=True, ctags=None, ctags_baseurl=None):
+def highlight_or_render(
+    code, filename, render_markup=True, ctags=None, ctags_baseurl=None
+):
     """Render code using Pygments, markup (markdown, rst, ...) using the
     corresponding renderer, if available.

@@ -95 +116 @@
         lexer = TextLexer()

     formatter_cls = {
-        'Python': KlausPythonFormatter,
+        "Python": KlausPythonFormatter,
     }.get(lexer.name, KlausDefaultFormatter)
     if ctags:
         ctags_urlscheme = ctags_baseurl + "%(path)s%(fname)s%(fext)s"
@@ -27 +27 @@
         return

     def render_markdown(content):
-        return markdown.markdown(content, extensions=['toc', 'extra'])
+        return markdown.markdown(content, extensions=["toc", "extra"])

-    LANGUAGES.append((['.md', '.mkdn', '.mdwn', '.markdown'], render_markdown))
+    LANGUAGES.append(([".md", ".mkdn", ".mdwn", ".markdown"], render_markdown))


 def _load_restructured_text():
@@ -42 +42 @@
     def render_rest(content):
         # start by h2 and ignore invalid directives and so on
         # (most likely from Sphinx)
-        settings = {'initial_header_level': 2, 'report_level': 0}
-        return publish_parts(content,
-                             writer=Writer(),
-                             settings_overrides=settings).get('html_body')
+        settings = {"initial_header_level": 2, "report_level": 0}
+        return publish_parts(content, writer=Writer(), settings_overrides=settings).get(
+            "html_body"
+        )

-    LANGUAGES.append((['.rst', '.rest'], render_rest))
+    LANGUAGES.append(([".rst", ".rest"], render_rest))


 for loader in [_load_markdown, _load_restructured_text]:
@@ -8 +8 @@
 from dulwich.errors import NotTreeError
 import dulwich, dulwich.patch

-from klaus.utils import force_unicode, parent_directory, repo_human_name, \
-    encode_for_git, decode_from_git
+from klaus.utils import (
+    force_unicode,
+    parent_directory,
+    repo_human_name,
+    encode_for_git,
+    decode_from_git,
+)
 from klaus.diff import render_diff


-NOT_SET = '__not_set__'
+NOT_SET = "__not_set__"


 def cached_call(key, validator, producer, _cache={}):
@@ -26 +31 @@

 class FancyRepo(dulwich.repo.Repo):
     """A wrapper around Dulwich's Repo that adds some helper methods."""
+
     @property
     def name(self):
         return repo_human_name(self.path)
@@ -37 +43 @@
         # If self.get_refs() has changed, we should invalidate the cache.
         all_refs = self.get_refs()
         return cached_call(
-            key=(id(self), 'get_last_updated_at'),
+            key=(id(self), "get_last_updated_at"),
             validator=all_refs,
-            producer=lambda: self._get_last_updated_at(all_refs)
+            producer=lambda: self._get_last_updated_at(all_refs),
         )

     def _get_last_updated_at(self, all_refs):
@@ -51 +57 @@
             # Whoops. The ref points at a non-existant object
             pass
         resolveable_refs.sort(
-            key=lambda obj:getattr(obj, 'commit_time', float('-inf')),
-            reverse=True
+            key=lambda obj: getattr(obj, "commit_time", float("-inf")), reverse=True
         )
         for ref in resolveable_refs:
             # Find the latest ref that has a commit_time; tags do not
@@ -64 +69 @@
     @property
     def cloneurl(self):
         """Retrieve the gitweb notion of the public clone URL of this repo."""
-        f = self.get_named_file('cloneurl')
+        f = self.get_named_file("cloneurl")
         if f is not None:
             return f.read()
         c = self.get_config()
         try:
-            return force_unicode(c.get(b'gitweb', b'url'))
+            return force_unicode(c.get(b"gitweb", b"url"))
         except KeyError:
             return None

@@ -79 +84 @@
         """
         # Cache result to speed up repo_list.html template.
         # If description file mtime has changed, we should invalidate the cache.
-        description_file = os.path.join(self._controldir, 'description')
+        description_file = os.path.join(self._controldir, "description")
         try:
-            description_mtime = os.stat(os.path.join(self._controldir, 'description')).st_mtime
+            description_mtime = os.stat(
+                os.path.join(self._controldir, "description")
+            ).st_mtime
         except OSError:
             description_mtime = None

         return cached_call(
-            key=(id(self), 'get_description'),
+            key=(id(self), "get_description"),
             validator=description_mtime,
-            producer=self._get_description
+            producer=self._get_description,
         )

     def _get_description(self):
@@ -100 +107 @@

     def get_commit(self, rev):
         """Get commit object identified by `rev` (SHA or branch or tag name)."""
-        for prefix in ['refs/heads/', 'refs/tags/', '']:
+        for prefix in ["refs/heads/", "refs/tags/", ""]:
             key = prefix + rev
             try:
                 obj = self[encode_for_git(key)]
@@ -113 +120 @@

     def get_default_branch(self):
         """Tries to guess the default repo branch name."""
-        for candidate in ['master', 'trunk', 'default', 'gh-pages']:
+        for candidate in ["master", "trunk", "default", "gh-pages"]:
             try:
                 self.get_commit(candidate)
                 return candidate
@@ -128 +135 @@
         """Return a list of ref names that begin with `prefix`, ordered by the
         time they have been committed to last.
         """
+
         def get_commit_time(refname):
             try:
                 obj = self[refs[refname]]
@@ -149 +157 @@
         """Return a list of branch names of this repo, ordered by the time they
         have been committed to last.
         """
-        return self.get_ref_names_ordered_by_last_commit('refs/heads', exclude)
+        return self.get_ref_names_ordered_by_last_commit("refs/heads", exclude)

     def get_tag_names(self):
         """Return a list of tag names of this repo, ordered by creation time."""
-        return self.get_ref_names_ordered_by_last_commit('refs/tags')
+        return self.get_ref_names_ordered_by_last_commit("refs/tags")

     def get_tag_and_branch_shas(self):
         """Return a list of SHAs of all tags and branches."""
-        tag_shas = self.refs.as_dict(b'refs/tags/').values()
-        branch_shas = self.refs.as_dict(b'refs/heads/').values()
+        tag_shas = self.refs.as_dict(b"refs/tags/").values()
+        branch_shas = self.refs.as_dict(b"refs/heads/").values()
         return set(tag_shas) | set(branch_shas)

     def history(self, commit, path=None, max_commits=None, skip=0):
@@ -175 +183 @@
         # Therefore we use `git log` here until dulwich gets faster.
         # For the pure-Python implementation, see the 'purepy-hist' branch.

-        cmd = ['git', 'log', '--format=%H']
+        cmd = ["git", "log", "--format=%H"]
         if skip:
-            cmd.append('--skip=%d' % skip)
+            cmd.append("--skip=%d" % skip)
         if max_commits:
-            cmd.append('--max-count=%d' % max_commits)
+            cmd.append("--max-count=%d" % max_commits)
         cmd.append(decode_from_git(commit.id))
         if path:
-            cmd.extend(['--', path])
+            cmd.extend(["--", path])

         output = subprocess.check_output(cmd, cwd=os.path.abspath(self.path))
-        sha1_sums = output.strip().split(b'\n')
+        sha1_sums = output.strip().split(b"\n")
         return [self[sha1] for sha1 in sha1_sums]

     def blame(self, commit, path):
@@ -193 +201 @@
         the file, the list contains the commit that last changed that line.
         """
         # XXX see comment in `.history()`
-        cmd = ['git', 'blame', '-ls', '--root', decode_from_git(commit.id), '--', path]
+        cmd = ["git", "blame", "-ls", "--root", decode_from_git(commit.id), "--", path]
         output = subprocess.check_output(cmd, cwd=os.path.abspath(self.path))
-        sha1_sums = [line[:40] for line in output.strip().split(b'\n')]
-        return [None if self[sha1] is None else decode_from_git(self[sha1].id) for sha1 in sha1_sums]
+        sha1_sums = [line[:40] for line in output.strip().split(b"\n")]
+        return [
+            None if self[sha1] is None else decode_from_git(self[sha1].id)
+            for sha1 in sha1_sums
+        ]

     def get_blob_or_tree(self, commit, path):
         """Return the Git tree or blob object for `path` at `commit`."""
         try:
-            (mode, oid) = tree_lookup_path(self.__getitem__, commit.tree,
-                                           encode_for_git(path))
+            (mode, oid) = tree_lookup_path(
+                self.__getitem__, commit.tree, encode_for_git(path)
+            )
         except NotTreeError:
             # Some part of the path was a file where a folder was expected.
             # Example: path="/path/to/foo.txt" but "to" is a file in "/path".
@@ -233 +245 @@
         dirs.sort(key=keyfunc)

         if path:
-            dirs.insert(0, ('..', parent_directory(path)))
-
-        return {'submodules': submodules, 'dirs' : dirs, 'files' : files}
+            dirs.insert(0, ("..", parent_directory(path)))
+
+        return {"submodules": submodules, "dirs": dirs, "files": files}

     def commit_diff(self, commit):
         """Return the list of changes introduced by `commit`."""
@@ -246 +258 @@
         else:
             parent_tree = None

-        summary = {'nfiles': 0, 'nadditions': 0, 'ndeletions': 0}
+        summary = {"nfiles": 0, "nadditions": 0, "ndeletions": 0}
         file_changes = []  # the changes in detail

         dulwich_changes = self.object_store.tree_changes(parent_tree, commit.tree)
         for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in dulwich_changes:
-            summary['nfiles'] += 1
-            try:
-                oldblob = self.object_store[oldsha] if oldsha else Blob.from_string(b'')
-                newblob = self.object_store[newsha] if newsha else Blob.from_string(b'')
+            summary["nfiles"] += 1
+            try:
+                oldblob = self.object_store[oldsha] if oldsha else Blob.from_string(b"")
+                newblob = self.object_store[newsha] if newsha else Blob.from_string(b"")
             except KeyError:
                 # newsha/oldsha are probably related to submodules.
                 # Dulwich will handle that.
                 pass

             # Check for binary files -- can't show diffs for these
-            if guess_is_binary(newblob) or \
-               guess_is_binary(oldblob):
-                file_changes.append({
-                    'is_binary': True,
-                    'old_filename': oldpath or '/dev/null',
-                    'new_filename': newpath or '/dev/null',
-                    'chunks': None
-                })
+            if guess_is_binary(newblob) or guess_is_binary(oldblob):
+                file_changes.append(
+                    {
+                        "is_binary": True,
+                        "old_filename": oldpath or "/dev/null",
+                        "new_filename": newpath or "/dev/null",
+                        "chunks": None,
+                    }
+                )
                 continue

             additions, deletions, chunks = render_diff(
-                oldblob.splitlines(), newblob.splitlines())
+                oldblob.splitlines(), newblob.splitlines()
+            )
             change = {
-                'is_binary': False,
-                'old_filename': oldpath or '/dev/null',
-                'new_filename': newpath or '/dev/null',
-                'chunks': chunks,
-                'additions': additions,
-                'deletions': deletions,
+                "is_binary": False,
+                "old_filename": oldpath or "/dev/null",
+                "new_filename": newpath or "/dev/null",
+                "chunks": chunks,
+                "additions": additions,
+                "deletions": deletions,
             }
-            summary['nadditions'] += additions
-            summary['ndeletions'] += deletions
+            summary["nadditions"] += additions
+            summary["ndeletions"] += deletions
             file_changes.append(change)

         return summary, file_changes
@@ -293 +307 @@
         else:
             parent_tree = None
         bytesio = io.BytesIO()
-        dulwich.patch.write_tree_diff(bytesio, self.object_store, parent_tree, commit.tree)
+        dulwich.patch.write_tree_diff(
+            bytesio, self.object_store, parent_tree, commit.tree
+        )
         return bytesio.getvalue()

     def freeze(self):
@@ -304 +320 @@
     """A special version of FancyRepo that assumes the underlying Git
     repository does not change. Used for performance optimizations.
    """
+
    def __init__(self, repo):
        self.__repo = repo
        self.__last_updated_at = NOT_SET

    def __setattr__(self, name, value):
-        if not name.startswith('_FrozenFancyRepo__'):
+        if not name.startswith("_FrozenFancyRepo__"):
            raise TypeError("Can't set %s attribute on FrozenFancyRepo" % name)
        super(FrozenFancyRepo, self).__setattr__(name, value)

@@ -324 +341 @@

 class InvalidRepo:
    """Represent an invalid repository and store pertinent data."""
+
    def __init__(self, path):
        self.path = path

@@ -8 +8 @@
 import warnings
 import subprocess
 import six
+
 try:
     import chardet
 except ImportError:
@@ -47 +48 @@
     :param app: the WSGI application
     :param num_proxies: the number of proxy servers in front of the app.
     """
+
     def __call__(self, environ, start_response):
-        script_name = environ.get('HTTP_X_SCRIPT_NAME')
+        script_name = environ.get("HTTP_X_SCRIPT_NAME")
         if script_name is not None:
-            if script_name.endswith('/'):
-                warnings.warn(
-                    "'X-Script-Name' header should not end in '/' (found: %r). "
-                    "Please fix your proxy's configuration." % script_name)
-                script_name = script_name.rstrip('/')
-            environ['SCRIPT_NAME'] = script_name
+            if script_name.endswith("/"):
+                warnings.warn(
+                    "'X-Script-Name' header should not end in '/' (found: %r). "
+                    "Please fix your proxy's configuration." % script_name
+                )
+                script_name = script_name.rstrip("/")
+            environ["SCRIPT_NAME"] = script_name
         return super(ProxyFix, self).__call__(environ, start_response)


@@ -75 +78 @@

     Snippet stolen from http://flask.pocoo.org/snippets/35/
     """
+
     def __init__(self, app):
         warnings.warn(
             "'klaus.utils.SubUri' is deprecated and will be removed. "
             "Please upgrade your code to use 'klaus.utils.ProxyFix' instead.",
-            DeprecationWarning
+            DeprecationWarning,
         )
         self.app = app

     def __call__(self, environ, start_response):
-        script_name = environ.get('HTTP_X_SCRIPT_NAME', '')
+        script_name = environ.get("HTTP_X_SCRIPT_NAME", "")
         if script_name:
-            environ['SCRIPT_NAME'] = script_name.rstrip('/')
-
-        if script_name and environ['PATH_INFO'].startswith(script_name):
+            environ["SCRIPT_NAME"] = script_name.rstrip("/")
+
+        if script_name and environ["PATH_INFO"].startswith(script_name):
             # strip `script_name` from PATH_INFO
-            environ['PATH_INFO'] = environ['PATH_INFO'][len(script_name):]
-
-        if 'HTTP_X_SCHEME' in environ:
-            environ['wsgi.url_scheme'] = environ['HTTP_X_SCHEME']
+            environ["PATH_INFO"] = environ["PATH_INFO"][len(script_name) :]
+
+        if "HTTP_X_SCHEME" in environ:
+            environ["wsgi.url_scheme"] = environ["HTTP_X_SCHEME"]

         return self.app(environ, start_response)

@@ -104 +108 @@


 def formattimestamp(timestamp):
-    return datetime.datetime.fromtimestamp(timestamp).strftime('%b %d, %Y %H:%M:%S')
+    return datetime.datetime.fromtimestamp(timestamp).strftime("%b %d, %Y %H:%M:%S")


 def guess_is_binary(dulwich_blob):
-    return any(b'\0' in chunk for chunk in dulwich_blob.chunked)
+    return any(b"\0" in chunk for chunk in dulwich_blob.chunked)


 def guess_is_image(filename):
     mime, _ = mimetypes.guess_type(filename)
     if mime is None:
         return False
-    return mime.startswith('image/')
+    return mime.startswith("image/")


 def encode_for_git(s):
     # XXX This assumes everything to be UTF-8 encoded
-    return s.encode('utf8')
+    return s.encode("utf8")


 def decode_from_git(b):
     # XXX This assumes everything to be UTF-8 encoded
-    return b.decode('utf8')
+    return b.decode("utf8")


 def force_unicode(s):
@@ -137 +141 @@
     last_exc = None
     # Try some default encodings:
     try:
-        return s.decode('utf-8')
+        return s.decode("utf-8")
     except UnicodeDecodeError as exc:
         last_exc = exc
     try:
@@ -147 +151 @@

     if chardet is not None:
         # Try chardet, if available
-        encoding = chardet.detect(s)['encoding']
+        encoding = chardet.detect(s)["encoding"]
         if encoding is not None:
             return s.decode(encoding)

@@ -163 +167 @@
     >>> extract_author_name("noname@example.com")
     "noname@example.com"
     """
-    match = re.match('^(.*?)<.*?>$', email)
+    match = re.match("^(.*?)<.*?>$", email)
     if match:
         return match.group(1).strip()
     return email
@@ -171 +175 @@

 def is_hex_prefix(s):
     if len(s) % 2:
-        s += '0'
+        s += "0"
     try:
         binascii.unhexlify(s)
         return True
@@ -196 +200 @@
     [('foo', 'foo'), ('bar', 'foo/bar'), ('spam', 'foo/bar/spam')]
     """
     seen = []
-    for part in path.split('/'):
+    for part in path.split("/"):
         seen.append(part)
-        yield part, '/'.join(seen)
+        yield part, "/".join(seen)


 def shorten_message(msg):
-    return msg.split('\n')[0]
+    return msg.split("\n")[0]


 def replace_dupes(ls, replacement):
@@ -229 +233 @@
     This is used to display the "powered by klaus $VERSION" footer on each page,
     $VERSION being either the SHA guessed by this function or the latest release number.
     """
-    git_dir = os.path.join(os.path.dirname(__file__), '..', '.git')
-    try:
-        return force_unicode(subprocess.check_output(
-            ['git', 'log', '--format=%h', '-n', '1'],
-            cwd=git_dir
-        ).strip())
+    git_dir = os.path.join(os.path.dirname(__file__), "..", ".git")
+    try:
+        return force_unicode(
+            subprocess.check_output(
+                ["git", "log", "--format=%h", "-n", "1"], cwd=git_dir
+            ).strip()
+        )
     except OSError:
         # Either the git executable couldn't be found in the OS's PATH
         # or no ".git" directory exists, i.e. this is no "bleeding-edge" installation.
         return None


-def sanitize_branch_name(name, chars='./', repl='-'):
+def sanitize_branch_name(name, chars="./", repl="-"):
     for char in chars:
         name = name.replace(char, repl)
     return name


 def escape_html(s):
-    return s.replace(b'&', b'&amp;').replace(b'<', b'&lt;') \
-            .replace(b'>', b'&gt;').replace(b'"', b'&quot;')
+    return (
+        s.replace(b"&", b"&amp;")
+        .replace(b"<", b"&lt;")
+        .replace(b">", b"&gt;")
+        .replace(b'"', b"&quot;")
+    )


 def tarball_basename(repo_name, rev):
     """Determine the name for a tarball."""
-    rev = sanitize_branch_name(rev, chars='/')
-    if rev.startswith(repo_name + '-'):
+    rev = sanitize_branch_name(rev, chars="/")
+    if rev.startswith(repo_name + "-"):
         # If the rev is a tag name that already starts with the repo name,
         # skip it.
         return rev
-    elif len(rev) >= 2 and rev[0] == 'v' and not rev[1].isalpha():
+    elif len(rev) >= 2 and rev[0] == "v" and not rev[1].isalpha():
         # If the rev is a tag name prefixed by a 'v', skip the 'v'.
         # So, v-1.0 -> 1.0, v1.0 -> 1.0, but vanilla -> vanilla.
         return "%s-%s" % (repo_name, rev[1:])
@@ -278 +287 @@
     3. /x/y -> y
     """
     name = path.rstrip(os.sep).split(os.sep)[-1]
-    if name.endswith('.git'):
+    if name.endswith(".git"):
         name = name[:-4]
     return name
@@ -18 +18 @@
     ctags = None
 else:
     from klaus import ctagscache
+
     CTAGS_CACHE = ctagscache.CTagsCache()

 from klaus import markup
 from klaus.highlighting import highlight_or_render
-from klaus.utils import parent_directory, subpaths, force_unicode, guess_is_binary, \
-    guess_is_image, replace_dupes, sanitize_branch_name, encode_for_git
-
-
-README_FILENAMES = [b'README', b'README.md', b'README.mkdn', b'README.mdwn', b'README.markdown', b'README.rst']
+from klaus.utils import (
+    parent_directory,
+    subpaths,
+    force_unicode,
+    guess_is_binary,
+    guess_is_image,
+    replace_dupes,
+    sanitize_branch_name,
+    encode_for_git,
+)
+
+
+README_FILENAMES = [
+    b"README",
+    b"README.md",
+    b"README.mkdn",
+    b"README.mdwn",
+    b"README.markdown",
+    b"README.rst",
+]


 def repo_list():
     """Show a list of all repos and can be sorted by last update."""
-    if 'by-name' in request.args:
-        order_by = 'name'
+    if "by-name" in request.args:
+        order_by = "name"
         sort_key = lambda repo: repo.name
     else:
-        order_by = 'last_updated'
+        order_by = "last_updated"
         sort_key = lambda repo: (-(repo.fast_get_last_updated_at() or -1), repo.name)
-    repos = sorted([repo.freeze() for repo in current_app.valid_repos.values()],
-                   key=sort_key)
-    invalid_repos = sorted(current_app.invalid_repos.values(), key=lambda repo: repo.name)
-    return render_template('repo_list.html', repos=repos, invalid_repos=invalid_repos,
-                           order_by=order_by, base_href=None)
-
+    repos = sorted(
+        [repo.freeze() for repo in current_app.valid_repos.values()], key=sort_key
+    )
+    invalid_repos = sorted(
+        current_app.invalid_repos.values(), key=lambda repo: repo.name
+    )
+    return render_template(
+        "repo_list.html",
+        repos=repos,
+        invalid_repos=invalid_repos,
+        order_by=order_by,
+        base_href=None,
+    )


 def robots_txt():
     """Serve the robots.txt file to manage the indexing of the site by search engines."""
-    return current_app.send_static_file('robots.txt')
+    return current_app.send_static_file("robots.txt")


 def _get_repo_and_rev(repo, rev=None, path=None):
@@ -82 +105 @@

 def _get_submodule(repo, commit, path):
     """Retrieve submodule URL and path."""
-    submodule_blob = repo.get_blob_or_tree(commit, '.gitmodules')
+    submodule_blob = repo.get_blob_or_tree(commit, ".gitmodules")
     config = dulwich.config.ConfigFile.from_file(
-        BytesIO(submodule_blob.as_raw_string()))
-    key = (b'submodule', path)
-    submodule_url = config.get(key, b'url')
-    submodule_path = config.get(key, b'path')
+        BytesIO(submodule_blob.as_raw_string())
+    )
+    key = (b"submodule", path)
+    submodule_url = config.get(key, b"url")
+    submodule_path = config.get(key, b"path")
     return (submodule_url, submodule_path)


@@ -103 +127 @@
     is "/foo/bar", only commits related to "/foo/bar" are displayed, and if
     `rev` is "master", the history of the "master" branch is displayed.
     """
+
     def __init__(self, view_name):
         self.view_name = view_name
         self.context = {}

-    def dispatch_request(self, repo, rev=None, path=''):
+    def dispatch_request(self, repo, rev=None, path=""):
         """Dispatch repository, revision (if any) and path (if any). To retain
         compatibility with :func:`url_for`, view routing uses two arguments:
         rev and path, although a single path is sufficient (from Git's point of
@@ -120 +145 @@

         [1] https://github.com/jonashaag/klaus/issues/36#issuecomment-23990266
         """
-        self.make_template_context(repo, rev, path.strip('/'))
+        self.make_template_context(repo, rev, path.strip("/"))
         return self.get_response()

     def get_response(self):
@@ -135 +160 @@
             raise NotFound("File not found")

         self.context = {
-            'view': self.view_name,
-            'repo': repo,
-            'rev': rev,
-            'commit': commit,
-            'branches': repo.get_branch_names(exclude=rev),
-            'tags': repo.get_tag_names(),
-            'path': path,
-            'blob_or_tree': blob_or_tree,
-            'subpaths': list(subpaths(path)) if path else None,
-            'base_href': None,
+            "view": self.view_name,
+            "repo": repo,
+            "rev": rev,
+            "commit": commit,
+            "branches": repo.get_branch_names(exclude=rev),
+            "tags": repo.get_tag_names(),
+            "path": path,
+            "blob_or_tree": blob_or_tree,
+            "subpaths": list(subpaths(path)) if path else None,
+            "base_href": None,
         }


 class CommitView(BaseRepoView):
-    template_name = 'view_commit.html'
+    template_name = "view_commit.html"


 class PatchView(BaseRepoView):
     def get_response(self):
         return Response(
-            self.context['repo'].raw_commit_diff(self.context['commit']),
-            mimetype='text/plain',
+            self.context["repo"].raw_commit_diff(self.context["commit"]),
+            mimetype="text/plain",
         )


 class TreeViewMixin(object):
     """The logic required for displaying the current directory in the sidebar."""
+
     def make_template_context(self, *args):
         super(TreeViewMixin, self).make_template_context(*args)
-        self.context['root_tree'] = self.listdir()
+        self.context["root_tree"] = self.listdir()

     def listdir(self):
         """Return a list of directories and files in the current path of the selected commit."""
         root_directory = self.get_root_directory()
-        return self.context['repo'].listdir(
-            self.context['commit'],
-            root_directory
-        )
+        return self.context["repo"].listdir(self.context["commit"], root_directory)

     def get_root_directory(self):
-        root_directory = self.context['path']
-        if isinstance(self.context['blob_or_tree'], dulwich.objects.Blob):
+        root_directory = self.context["path"]
+        if isinstance(self.context["blob_or_tree"], dulwich.objects.Blob):
             # 'path' is a file (not folder) name
             root_directory = parent_directory(root_directory)
         return root_directory
@@ -184 +207 @@

 class HistoryView(TreeViewMixin, BaseRepoView):
     """Show commits of a branch + path, just like `git log`. With pagination."""
-    template_name = 'history.html'
+
+    template_name = "history.html"

     def make_template_context(self, *args):
         super(HistoryView, self).make_template_context(*args)

         try:
-            page = int(request.args.get('page'))
+            page = int(request.args.get("page"))
         except (TypeError, ValueError):
             page = 0

-        self.context['page'] = page
+        self.context["page"] = page

         history_length = 30
         if page:
-            skip = (self.context['page']-1) * 30 + 10
+            skip = (self.context["page"] - 1) * 30 + 10
             if page > 7:
-                self.context['previous_pages'] = [0, 1, 2, None] + list(range(page))[-3:]
+                self.context["previous_pages"] = [0, 1, 2, None] + list(range(page))[
+                    -3:
+                ]
             else:
-                self.context['previous_pages'] = range(page)
+                self.context["previous_pages"] = range(page)
         else:
             skip = 0

-        history = self.context['repo'].history(
-            self.context['commit'],
-            self.context['path'],
-            history_length + 1,
-            skip
+        history = self.context["repo"].history(
+            self.context["commit"], self.context["path"], history_length + 1, skip
         )
         if len(history) == history_length + 1:
             # At least one more commit for next page left
@@ -220 +243 @@
         else:
             more_commits = False

-        self.context.update({
-            'history': history,
-            'more_commits': more_commits,
-        })
+        self.context.update(
+            {
+                "history": history,
+                "more_commits": more_commits,
+            }
+        )


 class IndexView(TreeViewMixin, BaseRepoView):
     """Show commits of a branch, just like `git log`.

     Also, README, if available."""
-    template_name = 'index.html'
+
+    template_name = "index.html"

     def _get_readme(self):
-        tree = self.context['repo'][self.context['commit'].tree]
+        tree = self.context["repo"][self.context["commit"].tree]
         for name in README_FILENAMES:
             if name in tree:
-                readme_data = self.context['repo'][tree[name][1]].data
+                readme_data = self.context["repo"][tree[name][1]].data
                 readme_filename = name
                 return (readme_filename, readme_data)
         else:
@@ -245 +271 @@
     def make_template_context(self, *args):
         super(IndexView, self).make_template_context(*args)

-        self.context['base_href'] = url_for(
-            'blob',
-            repo=self.context['repo'].name,
-            rev=self.context['rev'],
-            path=''
-        )
-
-        self.context['page'] = 0
+        self.context["base_href"] = url_for(
+            "blob", repo=self.context["repo"].name, rev=self.context["rev"], path=""
+        )
+
+        self.context["page"] = 0
         history_length = 10
-        history = self.context['repo'].history(
-            self.context['commit'],
-            self.context['path'],
+        history = self.context["repo"].history(
+            self.context["commit"],
+            self.context["path"],
             history_length + 1,
             skip=0,
         )
@@ -268 +291 @@
         else:
             more_commits = False

-        self.context.update({
-            'history': history,
-            'more_commits': more_commits,
-        })
+        self.context.update(
+            {
+                "history": history,
+                "more_commits": more_commits,
+            }
+        )
         try:
             (readme_filename, readme_data) = self._get_readme()
         except KeyError:
-            self.context.update({
-                'is_markup': None,
-                'rendered_code': None,
-            })
-        else:
-            readme_filename = force_unicode(readme_filename)
+            self.context.update(
304 | { | |
305 | "is_markup": None, | |
306 | "rendered_code": None, | |
307 | } | |
308 | ) | |
309 | else: | |
310 | readme_filename = force_unicode(readme_filename) | |
284 | 311 | readme_data = force_unicode(readme_data) |
285 | self.context.update({ | |
286 | 'is_markup': markup.can_render(readme_filename), | |
287 | 'rendered_code': highlight_or_render(readme_data, readme_filename) | |
288 | }) | |
312 | self.context.update( | |
313 | { | |
314 | "is_markup": markup.can_render(readme_filename), | |
315 | "rendered_code": highlight_or_render(readme_data, readme_filename), | |
316 | } | |
317 | ) | |
289 | 318 | |
290 | 319 | |
291 | 320 | class BaseBlobView(BaseRepoView): |
292 | 321 | def make_template_context(self, *args): |
293 | 322 | super(BaseBlobView, self).make_template_context(*args) |
294 | if not isinstance(self.context['blob_or_tree'], dulwich.objects.Blob): | |
323 | if not isinstance(self.context["blob_or_tree"], dulwich.objects.Blob): | |
295 | 324 | raise NotFound("Not a blob") |
296 | self.context['filename'] = os.path.basename(self.context['path']) | |
325 | self.context["filename"] = os.path.basename(self.context["path"]) | |
297 | 326 | |
298 | 327 | |
299 | 328 | class SubmoduleView(BaseRepoView): |
300 | 329 | """Show an information page about a submodule.""" |
301 | template_name = 'submodule.html' | |
330 | ||
331 | template_name = "submodule.html" | |
302 | 332 | |
303 | 333 | def make_template_context(self, repo, rev, path): |
304 | 334 | repo, rev, path, commit = _get_repo_and_rev(repo, rev, path) |
305 | 335 | |
306 | 336 | try: |
307 | 337 | submodule_rev = tree_lookup_path( |
308 | repo.__getitem__, commit.tree, encode_for_git(path))[1] | |
338 | repo.__getitem__, commit.tree, encode_for_git(path) | |
339 | )[1] | |
309 | 340 | except KeyError: |
310 | 341 | raise NotFound("Parent path for submodule missing") |
311 | 342 | |
312 | 343 | try: |
313 | 344 | (submodule_url, submodule_path) = _get_submodule( |
314 | repo, commit, encode_for_git(path)) | |
345 | repo, commit, encode_for_git(path) | |
346 | ) | |
315 | 347 | except KeyError: |
316 | 348 | submodule_url = None |
317 | 349 | submodule_path = None |
321 | 353 | # submodule_path, revision submodule_rev. |
322 | 354 | |
323 | 355 | self.context = { |
324 | 'view': self.view_name, | |
325 | 'repo': repo, | |
326 | 'rev': rev, | |
327 | 'commit': commit, | |
328 | 'branches': repo.get_branch_names(exclude=rev), | |
329 | 'tags': repo.get_tag_names(), | |
330 | 'path': path, | |
331 | 'subpaths': list(subpaths(path)) if path else None, | |
332 | 'submodule_url': force_unicode(submodule_url), | |
333 | 'submodule_path': force_unicode(submodule_path), | |
334 | 'submodule_rev': force_unicode(submodule_rev), | |
335 | 'base_href': None, | |
356 | "view": self.view_name, | |
357 | "repo": repo, | |
358 | "rev": rev, | |
359 | "commit": commit, | |
360 | "branches": repo.get_branch_names(exclude=rev), | |
361 | "tags": repo.get_tag_names(), | |
362 | "path": path, | |
363 | "subpaths": list(subpaths(path)) if path else None, | |
364 | "submodule_url": force_unicode(submodule_url), | |
365 | "submodule_path": force_unicode(submodule_path), | |
366 | "submodule_rev": force_unicode(submodule_rev), | |
367 | "base_href": None, | |
336 | 368 | } |
337 | 369 | |
338 | 370 | |
339 | 371 | class BaseFileView(TreeViewMixin, BaseBlobView): |
340 | 372 | """Base for FileView and BlameView.""" |
373 | ||
341 | 374 | def render_code(self, render_markup): |
342 | should_use_ctags = current_app.should_use_ctags(self.context['repo'], | |
343 | self.context['commit']) | |
375 | should_use_ctags = current_app.should_use_ctags( | |
376 | self.context["repo"], self.context["commit"] | |
377 | ) | |
344 | 378 | if should_use_ctags: |
345 | 379 | if ctags is None: |
346 | 380 | raise ImportError("Ctags enabled but python-ctags not installed") |
347 | 381 | ctags_base_url = url_for( |
348 | 382 | self.view_name, |
349 | repo=self.context['repo'].name, | |
350 | rev=self.context['rev'], | |
351 | path='' | |
383 | repo=self.context["repo"].name, | |
384 | rev=self.context["rev"], | |
385 | path="", | |
352 | 386 | ) |
353 | 387 | ctags_tagsfile = CTAGS_CACHE.get_tagsfile( |
354 | self.context['repo'].path, | |
355 | self.context['commit'].id | |
388 | self.context["repo"].path, self.context["commit"].id | |
356 | 389 | ) |
357 | 390 | ctags_args = { |
358 | 'ctags': ctags.CTags(ctags_tagsfile.encode(sys.getfilesystemencoding())), | |
359 | 'ctags_baseurl': ctags_base_url, | |
391 | "ctags": ctags.CTags( | |
392 | ctags_tagsfile.encode(sys.getfilesystemencoding()) | |
393 | ), | |
394 | "ctags_baseurl": ctags_base_url, | |
360 | 395 | } |
361 | 396 | else: |
362 | 397 | ctags_args = {} |
363 | 398 | |
364 | 399 | return highlight_or_render( |
365 | force_unicode(self.context['blob_or_tree'].data), | |
366 | self.context['filename'], | |
400 | force_unicode(self.context["blob_or_tree"].data), | |
401 | self.context["filename"], | |
367 | 402 | render_markup, |
368 | 403 | **ctags_args |
369 | 404 | ) |
370 | 405 | |
371 | 406 | def make_template_context(self, *args): |
372 | 407 | super(BaseFileView, self).make_template_context(*args) |
373 | self.context.update({ | |
374 | 'can_render': True, | |
375 | 'is_binary': False, | |
376 | 'too_large': False, | |
377 | 'is_markup': False, | |
378 | }) | |
379 | ||
380 | binary = guess_is_binary(self.context['blob_or_tree']) | |
381 | too_large = sum(map(len, self.context['blob_or_tree'].chunked)) > 100*1024 | |
408 | self.context.update( | |
409 | { | |
410 | "can_render": True, | |
411 | "is_binary": False, | |
412 | "too_large": False, | |
413 | "is_markup": False, | |
414 | } | |
415 | ) | |
416 | ||
417 | binary = guess_is_binary(self.context["blob_or_tree"]) | |
418 | too_large = sum(map(len, self.context["blob_or_tree"].chunked)) > 100 * 1024 | |
382 | 419 | if binary: |
383 | self.context.update({ | |
384 | 'can_render': False, | |
385 | 'is_binary': True, | |
386 | 'is_image': guess_is_image(self.context['filename']), | |
387 | }) | |
420 | self.context.update( | |
421 | { | |
422 | "can_render": False, | |
423 | "is_binary": True, | |
424 | "is_image": guess_is_image(self.context["filename"]), | |
425 | } | |
426 | ) | |
388 | 427 | elif too_large: |
389 | self.context.update({ | |
390 | 'can_render': False, | |
391 | 'too_large': True, | |
392 | }) | |
428 | self.context.update( | |
429 | { | |
430 | "can_render": False, | |
431 | "too_large": True, | |
432 | } | |
433 | ) | |
393 | 434 | |
394 | 435 | |
395 | 436 | class FileView(BaseFileView): |
396 | 437 | """Shows a file rendered using ``pygmentize``.""" |
397 | template_name = 'view_blob.html' | |
438 | ||
439 | template_name = "view_blob.html" | |
398 | 440 | |
399 | 441 | def make_template_context(self, *args): |
400 | 442 | super(FileView, self).make_template_context(*args) |
401 | if self.context['can_render']: | |
402 | render_markup = 'markup' not in request.args | |
403 | self.context.update({ | |
404 | 'is_markup': markup.can_render(self.context['filename']), | |
405 | 'render_markup': render_markup, | |
406 | 'rendered_code': self.render_code(render_markup), | |
407 | }) | |
443 | if self.context["can_render"]: | |
444 | render_markup = "markup" not in request.args | |
445 | self.context.update( | |
446 | { | |
447 | "is_markup": markup.can_render(self.context["filename"]), | |
448 | "render_markup": render_markup, | |
449 | "rendered_code": self.render_code(render_markup), | |
450 | } | |
451 | ) | |
408 | 452 | |
409 | 453 | |
410 | 454 | class BlameView(BaseFileView): |
411 | template_name = 'blame_blob.html' | |
455 | template_name = "blame_blob.html" | |
412 | 456 | |
413 | 457 | def make_template_context(self, *args): |
414 | 458 | super(BlameView, self).make_template_context(*args) |
415 | if self.context['can_render']: | |
416 | line_commits = self.context['repo'].blame(self.context['commit'], self.context['path']) | |
459 | if self.context["can_render"]: | |
460 | line_commits = self.context["repo"].blame( | |
461 | self.context["commit"], self.context["path"] | |
462 | ) | |
417 | 463 | replace_dupes(line_commits, None) |
418 | self.context.update({ | |
419 | 'rendered_code': self.render_code(render_markup=False), | |
420 | 'line_commits': line_commits, | |
421 | }) | |
464 | self.context.update( | |
465 | { | |
466 | "rendered_code": self.render_code(render_markup=False), | |
467 | "line_commits": line_commits, | |
468 | } | |
469 | ) | |
422 | 470 | |
423 | 471 | |
424 | 472 | class RawView(BaseBlobView): |
425 | 473 | """Show a single file in raw form (as if it were a normal filesystem file
426 | 474 | served through a static file server). |
427 | 475 | """ |
476 | ||
428 | 477 | def get_response(self): |
429 | 478 | # Explicitly set an empty mimetype. This should work well for most |
430 | 479 | # browsers as they do file type recognition anyway. |
431 | 480 | # The correct way would be to implement proper file type recognition here. |
432 | return Response(self.context['blob_or_tree'].chunked, mimetype='') | |
481 | return Response(self.context["blob_or_tree"].chunked, mimetype="") | |
433 | 482 | |
434 | 483 | |
435 | 484 | class DownloadView(BaseRepoView): |
436 | 485 | """Download a repo as a tar.gz file.""" |
486 | ||
437 | 487 | def get_response(self): |
438 | basename = "%s@%s" % (self.context['repo'].name, | |
439 | sanitize_branch_name(self.context['rev'])) | |
488 | basename = "%s@%s" % ( | |
489 | self.context["repo"].name, | |
490 | sanitize_branch_name(self.context["rev"]), | |
491 | ) | |
440 | 492 | tarname = basename + ".tar.gz" |
441 | 493 | headers = { |
442 | 'Content-Disposition': "attachment; filename=%s" % tarname, | |
443 | 'Cache-Control': "no-store", # Disables browser caching | |
494 | "Content-Disposition": "attachment; filename=%s" % tarname, | |
495 | "Cache-Control": "no-store", # Disables browser caching | |
444 | 496 | } |
445 | 497 | |
446 | 498 | tar_stream = dulwich.archive.tar_stream( |
447 | self.context['repo'], | |
448 | self.context['blob_or_tree'], | |
449 | self.context['commit'].commit_time, | |
499 | self.context["repo"], | |
500 | self.context["blob_or_tree"], | |
501 | self.context["commit"].commit_time, | |
450 | 502 | format="gz", |
451 | 503 | prefix=encode_for_git(basename), |
452 | 504 | ) |
453 | return Response( | |
454 | tar_stream, | |
455 | mimetype="application/x-tgz", | |
456 | headers=headers | |
457 | ) | |
458 | ||
459 | ||
460 | history = HistoryView.as_view('history', 'history') | |
461 | index = IndexView.as_view('index', 'index') | |
462 | commit = CommitView.as_view('commit', 'commit') | |
463 | patch = PatchView.as_view('patch', 'patch') | |
464 | blame = BlameView.as_view('blame', 'blame') | |
465 | blob = FileView.as_view('blob', 'blob') | |
466 | raw = RawView.as_view('raw', 'raw') | |
467 | download = DownloadView.as_view('download', 'download') | |
468 | submodule = SubmoduleView.as_view('submodule', 'submodule') | |
505 | return Response(tar_stream, mimetype="application/x-tgz", headers=headers) | |
506 | ||
507 | ||
508 | history = HistoryView.as_view("history", "history") | |
509 | index = IndexView.as_view("index", "index") | |
510 | commit = CommitView.as_view("commit", "commit") | |
511 | patch = PatchView.as_view("patch", "patch") | |
512 | blame = BlameView.as_view("blame", "blame") | |
513 | blob = FileView.as_view("blob", "blob") | |
514 | raw = RawView.as_view("raw", "raw") | |
515 | download = DownloadView.as_view("download", "download") | |
516 | submodule = SubmoduleView.as_view("submodule", "submodule") |
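
(An aside on what drives every hunk above: Black's only real layout decision is its default 88-column line length. A call that fits is joined onto one line — that's why listdir() collapsed from three lines to one — and a call that doesn't fit is exploded to one argument per line with a trailing comma added after the last one. You can watch the threshold from Python; a minimal sketch, assuming only an installed black and its documented format_str/FileMode API:

import black

# Fits in 88 columns once joined, so Black collapses it to a single line,
# just like the listdir() call in TreeViewMixin above.
fits = (
    "x = self.context['repo'].listdir(\n"
    "    self.context['commit'],\n"
    "    root_directory\n"
    ")\n"
)
print(black.format_str(fits, mode=black.FileMode()))

# Too long for one line (92 columns), so Black explodes it, one argument
# per line, trailing comma included -- the DownloadView.basename hunk.
too_long = (
    "basename = '%s@%s' % (self.context['repo'].name, "
    "sanitize_branch_name(self.context['rev']))\n"
)
print(black.format_str(too_long, mode=black.FileMode()))

The second call prints the same multi-line form seen in the DownloadView hunk, with the quotes normalized to double quotes as well.)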
7 | 7 | # policy "install once, find never". Definitely a TODO! |
8 | 8 | # -- https://groups.google.com/group/comp.lang.python/msg/2105ee4d9e8042cb |
9 | 9 | from distutils.command.install import INSTALL_SCHEMES |
10 | ||
10 | 11 | for scheme in INSTALL_SCHEMES.values(): |
11 | scheme['data'] = scheme['purelib'] | |
12 | scheme["data"] = scheme["purelib"] | |
12 | 13 | |
13 | 14 | |
14 | 15 | install_data_files_hack() |
15 | 16 | |
16 | requires = ['six', 'flask', 'Werkzeug>=0.15.0', 'pygments', 'httpauth', 'humanize', 'dulwich>=0.19.3;python_version>="3.5"', 'dulwich>=0.19.3,<0.20;python_version<"3.5"'] | |
17 | requires = [ | |
18 | "six", | |
19 | "flask", | |
20 | "Werkzeug>=0.15.0", | |
21 | "pygments", | |
22 | "httpauth", | |
23 | "humanize", | |
24 | 'dulwich>=0.19.3;python_version>="3.5"', | |
25 | 'dulwich>=0.19.3,<0.20;python_version<"3.5"', | |
26 | ] | |
17 | 27 | |
18 | 28 | setup( |
19 | name='klaus', | |
20 | version='1.5.2', | |
21 | author='Jonas Haag', | |
22 | author_email='jonas@lophus.org', | |
23 | packages=['klaus', 'klaus.contrib'], | |
24 | scripts=['bin/klaus'], | |
29 | name="klaus", | |
30 | version="1.5.2", | |
31 | author="Jonas Haag", | |
32 | author_email="jonas@lophus.org", | |
33 | packages=["klaus", "klaus.contrib"], | |
34 | scripts=["bin/klaus"], | |
25 | 35 | include_package_data=True, |
26 | 36 | zip_safe=False, |
27 | url='https://github.com/jonashaag/klaus', | |
28 | description='The first Git web viewer that Just Works™.', | |
37 | url="https://github.com/jonashaag/klaus", | |
38 | description="The first Git web viewer that Just Works™.", | |
29 | 39 | long_description=__doc__, |
30 | 40 | classifiers=[ |
31 | 41 | "Development Status :: 5 - Production/Stable", |
40 | 50 | ], |
41 | 51 | install_requires=requires, |
42 | 52 | ) |
43 |
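
(One subtlety visible in the requires list above: Black normalizes quotes to double quotes, except when the string itself contains a double quote and converting would force escapes — which is why the two dulwich version markers keep their single quotes. The same sketch as before shows both cases, again assuming only black's format_str/FileMode API:

import black

# Plain string: single quotes are normalized to double quotes.
print(black.format_str("x = 'plain'\n", mode=black.FileMode()))

# String containing a double quote: left single-quoted to avoid escaping.
print(black.format_str('y = \'python_version>="3.5"\'\n', mode=black.FileMode()))
)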
10 | 10 | def test_dont_show_blame_link(): |
11 | 11 | with serve(): |
12 | 12 | for file in ["binary", "image.jpg", "toolarge"]: |
13 | response = requests.get(TEST_REPO_DONT_RENDER_URL + "blob/HEAD/" + file).text | |
13 | response = requests.get( | |
14 | TEST_REPO_DONT_RENDER_URL + "blob/HEAD/" + file | |
15 | ).text | |
14 | 16 | assert "blame" not in response |
15 | 17 | |
16 | 18 | |
18 | 20 | """Don't render blame even if someone navigated to the blame site by accident.""" |
19 | 21 | with serve(): |
20 | 22 | for file in ["binary", "image.jpg", "toolarge"]: |
21 | response = requests.get(TEST_REPO_DONT_RENDER_URL + "blame/HEAD/" + file).text | |
23 | response = requests.get( | |
24 | TEST_REPO_DONT_RENDER_URL + "blame/HEAD/" + file | |
25 | ).text | |
22 | 26 | assert "Can't show blame" in response |
0 | 0 | import os |
1 | ||
1 | 2 | try: |
2 | 3 | from importlib import reload # Python 3.4+ |
3 | 4 | except ImportError: |
19 | 20 | |
20 | 21 | def test_minimum_env(monkeypatch): |
21 | 22 | """Test to provide only required env var""" |
22 | monkeypatch.setattr(os, 'environ', os.environ.copy()) | |
23 | monkeypatch.setattr(os, "environ", os.environ.copy()) | |
23 | 24 | check_env( |
24 | {'KLAUS_SITE_NAME': TEST_SITE_NAME}, | |
25 | {"KLAUS_SITE_NAME": TEST_SITE_NAME}, | |
25 | 26 | ([], TEST_SITE_NAME), |
26 | 27 | dict( |
27 | 28 | htdigest_file=None, |
29 | 30 | require_browser_auth=False, |
30 | 31 | disable_push=False, |
31 | 32 | unauthenticated_push=False, |
32 | ctags_policy='none') | |
33 | ctags_policy="none", | |
34 | ), | |
33 | 35 | ) |
34 | 36 | |
35 | 37 | |
36 | 38 | def test_complete_env(monkeypatch): |
37 | 39 | """Test to provide all supported env var""" |
38 | monkeypatch.setattr(os, 'environ', os.environ.copy()) | |
40 | monkeypatch.setattr(os, "environ", os.environ.copy()) | |
39 | 41 | check_env( |
40 | 42 | { |
41 | 'KLAUS_REPOS': TEST_REPO, | |
42 | 'KLAUS_SITE_NAME': TEST_SITE_NAME, | |
43 | 'KLAUS_HTDIGEST_FILE': HTDIGEST_FILE, | |
44 | 'KLAUS_USE_SMARTHTTP': 'yes', | |
45 | 'KLAUS_REQUIRE_BROWSER_AUTH': '1', | |
46 | 'KLAUS_DISABLE_PUSH': 'false', | |
47 | 'KLAUS_UNAUTHENTICATED_PUSH': '0', | |
48 | 'KLAUS_CTAGS_POLICY': 'ALL' | |
43 | "KLAUS_REPOS": TEST_REPO, | |
44 | "KLAUS_SITE_NAME": TEST_SITE_NAME, | |
45 | "KLAUS_HTDIGEST_FILE": HTDIGEST_FILE, | |
46 | "KLAUS_USE_SMARTHTTP": "yes", | |
47 | "KLAUS_REQUIRE_BROWSER_AUTH": "1", | |
48 | "KLAUS_DISABLE_PUSH": "false", | |
49 | "KLAUS_UNAUTHENTICATED_PUSH": "0", | |
50 | "KLAUS_CTAGS_POLICY": "ALL", | |
49 | 51 | }, |
50 | 52 | ([TEST_REPO], TEST_SITE_NAME), |
51 | 53 | dict( |
54 | 56 | require_browser_auth=True, |
55 | 57 | disable_push=False, |
56 | 58 | unauthenticated_push=False, |
57 | ctags_policy='ALL') | |
59 | ctags_policy="ALL", | |
60 | ), | |
58 | 61 | ) |
59 | 62 | |
60 | 63 | |
61 | 64 | def test_unsupported_boolean_env(monkeypatch): |
62 | 65 | """Test that unsupported boolean env var raises ValueError""" |
63 | monkeypatch.setattr(os, 'environ', os.environ.copy()) | |
66 | monkeypatch.setattr(os, "environ", os.environ.copy()) | |
64 | 67 | with pytest.raises(ValueError): |
65 | 68 | check_env( |
66 | 69 | { |
67 | 'KLAUS_REPOS': TEST_REPO, | |
68 | 'KLAUS_SITE_NAME': TEST_SITE_NAME, | |
69 | 'KLAUS_HTDIGEST_FILE': HTDIGEST_FILE, | |
70 | 'KLAUS_USE_SMARTHTTP': 'unsupported', | |
71 | }, (), {} | |
70 | "KLAUS_REPOS": TEST_REPO, | |
71 | "KLAUS_SITE_NAME": TEST_SITE_NAME, | |
72 | "KLAUS_HTDIGEST_FILE": HTDIGEST_FILE, | |
73 | "KLAUS_USE_SMARTHTTP": "unsupported", | |
74 | }, | |
75 | (), | |
76 | {}, | |
72 | 77 | ) |
73 | 78 | |
74 | 79 | |
75 | 80 | def test_wsgi(monkeypatch): |
76 | 81 | """Test start of wsgi app""" |
77 | monkeypatch.setattr(os, 'environ', os.environ.copy()) | |
78 | os.environ['KLAUS_REPOS'] = TEST_REPO | |
79 | os.environ['KLAUS_SITE_NAME'] = TEST_SITE_NAME | |
82 | monkeypatch.setattr(os, "environ", os.environ.copy()) | |
83 | os.environ["KLAUS_REPOS"] = TEST_REPO | |
84 | os.environ["KLAUS_SITE_NAME"] = TEST_SITE_NAME | |
80 | 85 | from klaus.contrib import wsgi |
86 | ||
81 | 87 | with serve_app(wsgi.application): |
82 | 88 | assert can_reach_unauth() |
83 | 89 | assert not can_push_auth() |
84 | 90 | |
85 | os.environ['KLAUS_HTDIGEST_FILE'] = HTDIGEST_FILE | |
86 | os.environ['KLAUS_USE_SMARTHTTP'] = 'yes' | |
91 | os.environ["KLAUS_HTDIGEST_FILE"] = HTDIGEST_FILE | |
92 | os.environ["KLAUS_USE_SMARTHTTP"] = "yes" | |
87 | 93 | reload(wsgi) |
88 | 94 | with serve_app(wsgi.application): |
89 | 95 | assert can_reach_unauth() |
92 | 98 | |
93 | 99 | def test_wsgi_autoreload(monkeypatch): |
94 | 100 | """Test start of wsgi autoreload app""" |
95 | monkeypatch.setattr(os, 'environ', os.environ.copy()) | |
96 | os.environ['KLAUS_REPOS_ROOT'] = TEST_REPO_ROOT | |
97 | os.environ['KLAUS_SITE_NAME'] = TEST_SITE_NAME | |
101 | monkeypatch.setattr(os, "environ", os.environ.copy()) | |
102 | os.environ["KLAUS_REPOS_ROOT"] = TEST_REPO_ROOT | |
103 | os.environ["KLAUS_SITE_NAME"] = TEST_SITE_NAME | |
98 | 104 | from klaus.contrib import wsgi_autoreload, wsgi_autoreloading |
105 | ||
99 | 106 | with serve_app(wsgi_autoreload.application): |
100 | 107 | assert can_reach_unauth() |
101 | 108 | assert not can_push_auth() |
102 | 109 | |
103 | os.environ['KLAUS_HTDIGEST_FILE'] = HTDIGEST_FILE | |
104 | os.environ['KLAUS_USE_SMARTHTTP'] = 'yes' | |
110 | os.environ["KLAUS_HTDIGEST_FILE"] = HTDIGEST_FILE | |
111 | os.environ["KLAUS_USE_SMARTHTTP"] = "yes" | |
105 | 112 | reload(wsgi_autoreload) |
106 | 113 | reload(wsgi_autoreloading) |
107 | 114 | with serve_app(wsgi_autoreload.application): |
19 | 19 | |
20 | 20 | def test_unauthenticated_push_and_require_browser_auth(): |
21 | 21 | with pytest.raises(ValueError): |
22 | klaus.make_app([], None, use_smarthttp=True, unauthenticated_push=True, require_browser_auth=True) | |
22 | klaus.make_app( | |
23 | [], | |
24 | None, | |
25 | use_smarthttp=True, | |
26 | unauthenticated_push=True, | |
27 | require_browser_auth=True, | |
28 | ) | |
23 | 29 | |
24 | 30 | |
25 | 31 | def test_unauthenticated_push_without_use_smarthttp(): |
38 | 44 | for check, permitted in expected_permissions.items(): |
39 | 45 | if check in globals(): |
40 | 46 | checks = [check] |
41 | elif check.endswith('auth'): | |
42 | checks = ['can_%s' % check] | |
47 | elif check.endswith("auth"): | |
48 | checks = ["can_%s" % check] | |
43 | 49 | else: |
44 | checks = ['can_%s_unauth' % check, 'can_%s_auth' % check] | |
50 | checks = ["can_%s_unauth" % check, "can_%s_auth" % check] | |
45 | 51 | for check in checks: |
46 | 52 | assert globals()[check]() == permitted |
53 | ||
47 | 54 | return test |
48 | 55 | |
49 | 56 | |
50 | test_nosmart_noauth = options_test( | |
51 | {}, | |
52 | {'reach': True, 'clone': False, 'push': False} | |
53 | ) | |
57 | test_nosmart_noauth = options_test({}, {"reach": True, "clone": False, "push": False}) | |
54 | 58 | test_smart_noauth = options_test( |
55 | {'use_smarthttp': True}, | |
56 | {'reach': True, 'clone': True, 'push': False} | |
59 | {"use_smarthttp": True}, {"reach": True, "clone": True, "push": False} | |
57 | 60 | ) |
58 | 61 | test_smart_push = options_test( |
59 | {'use_smarthttp': True, 'htdigest_file': open(HTDIGEST_FILE)}, | |
60 | {'reach': True, 'clone': True, 'push_auth': True, 'push_unauth': False} | |
62 | {"use_smarthttp": True, "htdigest_file": open(HTDIGEST_FILE)}, | |
63 | {"reach": True, "clone": True, "push_auth": True, "push_unauth": False}, | |
61 | 64 | ) |
62 | 65 | test_unauthenticated_push = options_test( |
63 | {'use_smarthttp': True, 'unauthenticated_push': True}, | |
64 | {'reach': True, 'clone': True, 'push': True} | |
66 | {"use_smarthttp": True, "unauthenticated_push": True}, | |
67 | {"reach": True, "clone": True, "push": True}, | |
65 | 68 | ) |
66 | 69 | test_nosmart_auth = options_test( |
67 | {'require_browser_auth': True, 'htdigest_file': open(HTDIGEST_FILE)}, | |
68 | {'reach_auth': True, 'reach_unauth': False, 'clone': False, 'push': False} | |
70 | {"require_browser_auth": True, "htdigest_file": open(HTDIGEST_FILE)}, | |
71 | {"reach_auth": True, "reach_unauth": False, "clone": False, "push": False}, | |
69 | 72 | ) |
70 | 73 | test_smart_auth = options_test( |
71 | {'require_browser_auth': True, 'use_smarthttp': True, 'htdigest_file': open(HTDIGEST_FILE)}, | |
72 | {'reach_auth': True, 'reach_unauth': False, 'clone_auth': True, 'clone_unauth': False, 'push_unauth': False, 'push_auth': True} | |
74 | { | |
75 | "require_browser_auth": True, | |
76 | "use_smarthttp": True, | |
77 | "htdigest_file": open(HTDIGEST_FILE), | |
78 | }, | |
79 | { | |
80 | "reach_auth": True, | |
81 | "reach_unauth": False, | |
82 | "clone_auth": True, | |
83 | "clone_unauth": False, | |
84 | "push_unauth": False, | |
85 | "push_auth": True, | |
86 | }, | |
73 | 87 | ) |
74 | 88 | test_smart_auth_disable_push = options_test( |
75 | {'require_browser_auth': True, 'use_smarthttp': True, 'disable_push': True, 'htdigest_file': open(HTDIGEST_FILE)}, | |
76 | {'reach_auth': True, 'reach_unauth': False, 'clone_auth': True, 'clone_unauth': False, 'push': False} | |
89 | { | |
90 | "require_browser_auth": True, | |
91 | "use_smarthttp": True, | |
92 | "disable_push": True, | |
93 | "htdigest_file": open(HTDIGEST_FILE), | |
94 | }, | |
95 | { | |
96 | "reach_auth": True, | |
97 | "reach_unauth": False, | |
98 | "clone_auth": True, | |
99 | "clone_unauth": False, | |
100 | "push": False, | |
101 | }, | |
77 | 102 | ) |
78 | 103 | |
79 | 104 | test_ctags_disabled = options_test( |
80 | {}, | |
81 | {'ctags_tags_and_branches': False, 'ctags_all': False} | |
105 | {}, {"ctags_tags_and_branches": False, "ctags_all": False} | |
82 | 106 | ) |
83 | 107 | test_ctags_tags_and_branches = options_test( |
84 | {'ctags_policy': 'tags-and-branches'}, | |
85 | {'ctags_tags_and_branches': True, 'ctags_all': False} | |
108 | {"ctags_policy": "tags-and-branches"}, | |
109 | {"ctags_tags_and_branches": True, "ctags_all": False}, | |
86 | 110 | ) |
87 | 111 | test_ctags_all = options_test( |
88 | {'ctags_policy': 'ALL'}, | |
89 | {'ctags_tags_and_branches': True, 'ctags_all': True} | |
112 | {"ctags_policy": "ALL"}, {"ctags_tags_and_branches": True, "ctags_all": True} | |
90 | 113 | ) |
91 | 114 | |
92 | 115 | |
94 | 117 | def can_reach_unauth(): |
95 | 118 | return _check_http200(_GET_unauth, "test_repo") |
96 | 119 | |
120 | ||
97 | 121 | def can_reach_auth(): |
98 | 122 | return _check_http200(_GET_auth, "test_repo") |
99 | 123 | |
102 | 126 | def can_clone_unauth(): |
103 | 127 | return _can_clone(_GET_unauth, UNAUTH_TEST_REPO_URL) |
104 | 128 | |
129 | ||
105 | 130 | def can_clone_auth(): |
106 | 131 | return _can_clone(_GET_auth, AUTH_TEST_REPO_URL) |
132 | ||
107 | 133 | |
108 | 134 | def _can_clone(http_get, url): |
109 | 135 | tmp = tempfile.mkdtemp() |
110 | 136 | try: |
111 | return any([ | |
112 | "git clone" in http_get(TEST_REPO_URL).text, | |
113 | _check_http200(http_get, TEST_REPO_URL + "info/refs?service=git-upload-pack"), | |
114 | subprocess.call(["git", "clone", url, tmp]) == 0, | |
115 | ]) | |
137 | return any( | |
138 | [ | |
139 | "git clone" in http_get(TEST_REPO_URL).text, | |
140 | _check_http200( | |
141 | http_get, TEST_REPO_URL + "info/refs?service=git-upload-pack" | |
142 | ), | |
143 | subprocess.call(["git", "clone", url, tmp]) == 0, | |
144 | ] | |
145 | ) | |
116 | 146 | finally: |
117 | 147 | shutil.rmtree(tmp, ignore_errors=True) |
118 | 148 | |
121 | 151 | def can_push_unauth(): |
122 | 152 | return _can_push(_GET_unauth, UNAUTH_TEST_REPO_URL) |
123 | 153 | |
154 | ||
124 | 155 | def can_push_auth(): |
125 | 156 | return _can_push(_GET_auth, AUTH_TEST_REPO_URL) |
126 | 157 | |
158 | ||
127 | 159 | def _can_push(http_get, url): |
128 | return any([ | |
129 | _check_http200(http_get, TEST_REPO_URL + "info/refs?service=git-receive-pack"), | |
130 | _check_http200(http_get, TEST_REPO_URL + "git-receive-pack"), | |
131 | subprocess.call(["git", "push", url, "master"], cwd=TEST_REPO) == 0, | |
132 | ]) | |
160 | return any( | |
161 | [ | |
162 | _check_http200( | |
163 | http_get, TEST_REPO_URL + "info/refs?service=git-receive-pack" | |
164 | ), | |
165 | _check_http200(http_get, TEST_REPO_URL + "git-receive-pack"), | |
166 | subprocess.call(["git", "push", url, "master"], cwd=TEST_REPO) == 0, | |
167 | ] | |
168 | ) | |
133 | 169 | |
134 | 170 | |
135 | 171 | # Ctags |
136 | 172 | def ctags_tags_and_branches(): |
137 | 173 | return all( |
138 | 174 | _ctags_enabled(ref, f) |
139 | for ref in ["master", "tag1"] for f in ["test.c", "test.js"] | |
175 | for ref in ["master", "tag1"] | |
176 | for f in ["test.c", "test.js"] | |
140 | 177 | ) |
141 | 178 | |
142 | 179 | |
143 | 180 | def ctags_all(): |
144 | all_refs = re.findall('href=".+/commit/([a-z0-9]{40})/">', | |
145 | requests.get(UNAUTH_TEST_REPO_URL).text) | |
181 | all_refs = re.findall( | |
182 | 'href=".+/commit/([a-z0-9]{40})/">', requests.get(UNAUTH_TEST_REPO_URL).text | |
183 | ) | |
146 | 184 | assert len(all_refs) == 3 |
147 | 185 | return all( |
148 | _ctags_enabled(ref, f) | |
149 | for ref in all_refs for f in ["test.c", "test.js"] | |
150 | ) | |
186 | _ctags_enabled(ref, f) for ref in all_refs for f in ["test.c", "test.js"] | |
187 | ) | |
188 | ||
151 | 189 | |
152 | 190 | def _ctags_enabled(ref, filename): |
153 | 191 | response = requests.get(UNAUTH_TEST_REPO_URL + "blob/%s/%s" % (ref, filename)) |
157 | 195 | |
158 | 196 | |
159 | 197 | def _GET_unauth(url=""): |
160 | return requests.get(UNAUTH_TEST_SERVER + url, auth=requests.auth.HTTPDigestAuth("invalid", "password")) | |
198 | return requests.get( | |
199 | UNAUTH_TEST_SERVER + url, | |
200 | auth=requests.auth.HTTPDigestAuth("invalid", "password"), | |
201 | ) | |
202 | ||
161 | 203 | |
162 | 204 | def _GET_auth(url=""): |
163 | return requests.get(AUTH_TEST_SERVER + url, auth=requests.auth.HTTPDigestAuth("testuser", "testpassword")) | |
205 | return requests.get( | |
206 | AUTH_TEST_SERVER + url, | |
207 | auth=requests.auth.HTTPDigestAuth("testuser", "testpassword"), | |
208 | ) | |
209 | ||
164 | 210 | |
165 | 211 | def _check_http200(http_get, url): |
166 | 212 | try: |
1 | 1 | import re |
2 | 2 | import subprocess |
3 | 3 | import klaus_cli |
4 | ||
4 | 5 | try: |
5 | 6 | from unittest import mock |
6 | 7 | except ImportError: |
12 | 13 | manpage = force_unicode(subprocess.check_output(["man", "./klaus.1"])) |
13 | 14 | |
14 | 15 | def assert_in_manpage(s): |
15 | clean = lambda x: re.sub('(.\\x08)|\\s', '', x) | |
16 | clean = lambda x: re.sub("(.\\x08)|\\s", "", x) | |
16 | 17 | assert clean(s) in clean(manpage), "%r not found in manpage" % s |
17 | 18 | |
18 | 19 | mock_parser = mock.Mock() |
19 | with mock.patch('argparse.ArgumentParser') as mock_cls: | |
20 | with mock.patch("argparse.ArgumentParser") as mock_cls: | |
20 | 21 | mock_cls.return_value = mock_parser |
21 | 22 | klaus_cli.make_parser() |
22 | 23 | |
23 | 24 | for args, kwargs in mock_parser.add_argument.call_args_list: |
24 | if kwargs.get('metavar') == 'DIR': | |
25 | if kwargs.get("metavar") == "DIR": | |
25 | 26 | continue |
26 | 27 | for string in args: |
27 | 28 | assert_in_manpage(string) |
28 | if 'help' in kwargs: | |
29 | assert_in_manpage(kwargs['help']) | |
30 | if 'choices' in kwargs: | |
31 | for choice in kwargs['choices']: | |
29 | if "help" in kwargs: | |
30 | assert_in_manpage(kwargs["help"]) | |
31 | if "choices" in kwargs: | |
32 | for choice in kwargs["choices"]: | |
32 | 33 | assert_in_manpage(choice) |
0 | 0 | import unittest |
1 | ||
1 | 2 | try: |
2 | 3 | from unittest import mock |
3 | 4 | except ImportError: |
7 | 8 | |
8 | 9 | |
9 | 10 | class ForceUnicodeTests(unittest.TestCase): |
10 | ||
11 | 11 | def test_ascii(self): |
12 | self.assertEqual(u'foo', utils.force_unicode(b'foo')) | |
12 | self.assertEqual(u"foo", utils.force_unicode(b"foo")) | |
13 | 13 | |
14 | 14 | def test_utf8(self): |
15 | self.assertEqual(u'f\xce', utils.force_unicode(b'f\xc3\x8e')) | |
15 | self.assertEqual(u"f\xce", utils.force_unicode(b"f\xc3\x8e")) | |
16 | 16 | |
17 | 17 | def test_invalid(self): |
18 | with mock.patch.object(utils, 'chardet', None): | |
19 | self.assertRaises( | |
20 | UnicodeDecodeError, utils.force_unicode, b'f\xce') | |
18 | with mock.patch.object(utils, "chardet", None): | |
19 | self.assertRaises(UnicodeDecodeError, utils.force_unicode, b"f\xce") | |
21 | 20 | |
22 | 21 | |
23 | 22 | class TarballBasenameTests(unittest.TestCase): |
24 | ||
25 | 23 | def test_examples(self): |
26 | 24 | examples = [ |
27 | ('v0.1', 'klaus-0.1'), | |
28 | ('klaus-0.1', 'klaus-0.1'), | |
29 | ('0.1', 'klaus-0.1'), | |
30 | ('b3e70e08344ca3f83cc7033ecdbefa90443d7d2e', | |
31 | 'klaus@b3e70e08344ca3f83cc7033ecdbefa90443d7d2e'), | |
32 | ('vanilla', 'klaus-vanilla'), | |
33 | ] | |
25 | ("v0.1", "klaus-0.1"), | |
26 | ("klaus-0.1", "klaus-0.1"), | |
27 | ("0.1", "klaus-0.1"), | |
28 | ( | |
29 | "b3e70e08344ca3f83cc7033ecdbefa90443d7d2e", | |
30 | "klaus@b3e70e08344ca3f83cc7033ecdbefa90443d7d2e", | |
31 | ), | |
32 | ("vanilla", "klaus-vanilla"), | |
33 | ] | |
34 | 34 | for (rev, basename) in examples: |
35 | self.assertEqual(utils.tarball_basename('klaus', rev), basename) | |
35 | self.assertEqual(utils.tarball_basename("klaus", rev), basename) |
10 | 10 | response_body = BytesIO(response.raw.read()) |
11 | 11 | tarball = tarfile.TarFile.gzopen("test.tar.gz", fileobj=response_body) |
12 | 12 | with contextlib.closing(tarball): |
13 | assert tarball.extractfile('test_repo@master/test.c').read() == b'int a;\n' | |
13 | assert tarball.extractfile("test_repo@master/test.c").read() == b"int a;\n" | |
14 | 14 | |
15 | 15 | |
16 | 16 | def test_no_newline_at_end_of_file(): |
43 | 43 | assert "blob/HEAD/test.txt" not in response |
44 | 44 | assert "blob/HEAD/folder/test.txt" in response |
45 | 45 | |
46 | ||
46 | 47 | def test_display_invalid_repos(): |
47 | 48 | with serve(): |
48 | 49 | response = requests.get(UNAUTH_TEST_SERVER).text |
49 | 50 | assert '<ul class="repolist invalid">' in response |
50 | assert '<div class=name>invalid_repo</div>' in response | |
51 | assert "<div class=name>invalid_repo</div>" in response |
24 | 24 | |
25 | 25 | TEST_INVALID_REPO = os.path.abspath("tests/repos/build/invalid_repo") |
26 | 26 | |
27 | ALL_TEST_REPOS = [TEST_REPO, TEST_REPO_NO_NEWLINE, TEST_REPO_DONT_RENDER, TEST_INVALID_REPO] | |
27 | ALL_TEST_REPOS = [ | |
28 | TEST_REPO, | |
29 | TEST_REPO_NO_NEWLINE, | |
30 | TEST_REPO_DONT_RENDER, | |
31 | TEST_INVALID_REPO, | |
32 | ] | |
28 | 33 | |
29 | 34 | |
30 | 35 | @contextlib.contextmanager |
44 | 49 | yield |
45 | 50 | finally: |
46 | 51 | server.server_close() |
47 | if 'TRAVIS' in os.environ: | |
52 | if "TRAVIS" in os.environ: | |
48 | 53 | # This fixes some "Address already in use" cases on Travis. |
49 | 54 | time.sleep(1) |
50 | 55 | |
51 | 56 | |
52 | 57 | def serve_require_auth(*args, **kwargs): |
53 | kwargs['htdigest_file'] = open(HTDIGEST_FILE) | |
54 | kwargs['require_browser_auth'] = True | |
58 | kwargs["htdigest_file"] = open(HTDIGEST_FILE) | |
59 | kwargs["require_browser_auth"] = True | |
55 | 60 | return testserver(*args, **kwargs) |
5 | 5 | from collections import defaultdict |
6 | 6 | import atexit |
7 | 7 | |
8 | ||
8 | 9 | def view_from_url(url): |
9 | 10 | try: |
10 | return url.split('/')[2] | |
11 | return url.split("/")[2] | |
11 | 12 | except IndexError: |
12 | 13 | return url |
14 | ||
13 | 15 | |
14 | 16 | AHREF_RE = re.compile('href="([\w/][^"]+)"') |
15 | 17 | |
17 | 19 | errors = defaultdict(set) |
18 | 20 | durations = defaultdict(list) |
19 | 21 | |
22 | ||
20 | 23 | def main(): |
21 | urls = {'/'} | |
24 | urls = {"/"} | |
22 | 25 | while urls: |
23 | 26 | try: |
24 | 27 | http_conn.close() |
25 | 28 | except NameError: |
26 | 29 | pass |
27 | http_conn = httplib.HTTPConnection('localhost', 8080) | |
30 | http_conn = httplib.HTTPConnection("localhost", 8080) | |
28 | 31 | url = urls.pop() |
29 | 32 | if url in seen: |
30 | 33 | continue |
31 | 34 | seen.add(url) |
32 | if url.startswith('http'): | |
35 | if url.startswith("http"): | |
33 | 36 | continue |
34 | if '-v' in sys.argv: | |
35 | print 'Requesting %r...' % url | |
37 | if "-v" in sys.argv: | |
38 | print "Requesting %r..." % url | |
36 | 39 | start = time.time() |
37 | http_conn.request('GET', url) | |
40 | http_conn.request("GET", url) | |
38 | 41 | response = http_conn.getresponse() |
39 | 42 | durations[view_from_url(url)].append(time.time() - start) |
40 | 43 | status = str(response.status) |
41 | if status[0] == '3': | |
42 | urls.add(response.getheader('Location')) | |
43 | elif status[0] == '2': | |
44 | if not '/raw/' in url: | |
44 | if status[0] == "3": | |
45 | urls.add(response.getheader("Location")) | |
46 | elif status[0] == "2": | |
47 | if not "/raw/" in url: | |
45 | 48 | html = response.read() |
46 | html = re.sub('<pre>.*?</pre>', '', html) | |
49 | html = re.sub("<pre>.*?</pre>", "", html) | |
47 | 50 | urls.update(AHREF_RE.findall(html)) |
48 | 51 | else: |
49 | if '--failfast' in sys.argv: | |
52 | if "--failfast" in sys.argv: | |
50 | 53 | print url, status |
51 | 54 | exit(1) |
52 | 55 | errors[status].add(url) |
53 | 56 | |
57 | ||
54 | 58 | def print_stats(): |
55 | 59 | import pprint |
56 | print len(seen) | |
60 | ||
61 | print (len(seen)) | |
57 | 62 | pprint.pprint(dict(errors)) |
58 | print {url: sum(times)/len(times) for url, times in durations.iteritems()} | |
63 | print ({url: sum(times) / len(times) for url, times in durations.iteritems()}) | |
64 | ||
65 | ||
59 | 66 | atexit.register(print_stats) |
60 | 67 | |
61 | 68 | main() |
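
(And to keep the tree this way after a sweep like this, Black can run as a gate rather than a rewriter. A minimal sketch, assuming black is on PATH; --check is Black's flag for reporting instead of modifying, exiting non-zero when any file would change:

import subprocess
import sys

# Report-only run over the whole tree; fails the build if anything
# is no longer Black-formatted, without rewriting any file.
result = subprocess.run(["black", "--check", "."])
sys.exit(result.returncode)

Wired into CI, this should make the diff above the last big reformat the project ever needs.)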