[495079]: doc/source/bin/get_links.py
#!/usr/bin/env python
# get_links.py
#
# Collect link definitions from 'links' files found in start_dir and in each
# of its parent directories up to '.', rewrite every local path relative to
# start_dir, and write the combined list to outfilename.  When the same link
# name appears more than once, the entry found closest to start_dir wins.
from __future__ import with_statement

import os
import os.path


def run_command(start_dir, outfilename):
    # Note: start_dir is expected to be '.' or to begin with './' so that the
    # upward walk below terminates when it reaches '.'.
    dir = start_dir
    links_seen = set()

    def doctor(link_dir, path):
        # Don't mess with paths that just refer to another link:
        if path.rstrip()[-1] == '_': return path
        path = path.lstrip()
        # Don't mess with paths that point somewhere in the outside universe:
        if path.startswith('http://'): return ' ' + path
        # Prepend link_dir to path
        if link_dir.startswith('./'): path = link_dir[2:] + '/' + path
        elif link_dir != '.': path = link_dir + '/' + path
        # Prepare dir (start_dir, minus initial './')
        if start_dir == '.': dir = ''
        elif start_dir.startswith('./'): dir = start_dir[2:]
        else: dir = start_dir
        # Strip dir one component at a time, accumulating '../' prefixes,
        # until it is a prefix of path, so the result is relative to start_dir.
        rest = ' '
        last_dir = None
        while dir and dir != last_dir:
            if path.startswith(dir + '/'):
                ans = rest + path[len(dir) + 1:]
                #print "doctor(%s) abbr:" % (path.rstrip(),), ans
                return ans
            rest += '../'
            last_dir = dir
            dir, ignore = os.path.split(dir)
        ans = rest + path
        #print "doctor(%s) abs:" % (path.rstrip(),), ans
        return ans

    with open(outfilename, "w") as outfile:
        outfile.write("\n")
        while True:
            try:
                with open(os.path.join(dir, 'links')) as links:
                    for line in links:
                        link, path = line.split(':', 1)
                        if link not in links_seen:
                            links_seen.add(link)
                            outfile.write(":".join((link, doctor(dir, path))))
            except IOError:
                pass
            if dir == '.': break
            dir = os.path.dirname(dir)


if __name__ == "__main__":
    import sys
    if len(sys.argv) != 3:
        print >> sys.stderr, "usage: get_links.py dir outfile"
        sys.exit(2)
    run_command(sys.argv[1], sys.argv[2])
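
For context, here is a minimal usage sketch, not part of the original project. It assumes that get_links.py is importable as a module named get_links, that each 'links' file holds reStructuredText-style hyperlink targets (one ".. _name: path" line each, with paths relative to that file's directory), and that the script is run from the project root with a start_dir beginning with './'. The directory layout, link names, and the 'collected_links' output name below are hypothetical, chosen only to illustrate the path rewriting.

# Hypothetical demonstration of run_command (a sketch, not the project's
# real layout).  It builds a throwaway tree with two 'links' files, runs the
# collector, and prints the result.
import os
import shutil
import tempfile

import get_links          # assumes get_links.py is on the import path

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, 'doc', 'source'))

# Paths in a 'links' file are taken to be relative to that file's directory.
with open(os.path.join(root, 'links'), 'w') as f:
    f.write(".. _api: doc/api/index.html\n")
with open(os.path.join(root, 'doc', 'source', 'links'), 'w') as f:
    f.write(".. _intro: intro.html\n")

cwd = os.getcwd()
os.chdir(root)            # the script walks relative paths from '.'
try:
    get_links.run_command('./doc/source', 'collected_links')
    with open('collected_links') as f:
        print f.read()
    # Expected contents (after a leading blank line):
    #   .. _intro: intro.html
    #   .. _api: ../api/index.html
finally:
    os.chdir(cwd)
    shutil.rmtree(root)

Note how the entry from the top-level 'links' file is rewritten to '../api/index.html' so that it resolves correctly from doc/source, while the entry already local to doc/source is left as a bare file name.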
