| #!/usr/bin/python |
| |
| import cookielib |
| import os |
| import re |
| import urllib2 |
| import sys |
| import json |
| import urlparse |
| import snapshotsapi |
| |
def cookie_setup():
    """Install a global urllib2 opener that sends the LMC_COOKIES cookies.

    Reads the raw cookie string from the LMC_COOKIES environment variable
    and attaches it as a Cookie header on every request made through
    urllib2.  Does nothing when the variable is unset or empty.
    """
    raw_cookies = os.getenv('LMC_COOKIES')
    if not raw_cookies:
        return
    jar = cookielib.LWPCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
    opener.addheaders.append(('Cookie', raw_cookies))
    urllib2.install_opener(opener)
| |
| def list_links(url, regex=""): |
| try: |
| api = snapshotsapi.ApiUrls(url) |
| response = urllib2.urlopen(api.ls()) |
| listing = json.loads(response.read())["files"] |
| links = [] |
| for file_info in listing: |
| # we do not care about /latest/ - we want numbers |
| if file_info["name"] == "latest": |
| continue |
| |
| if regex == "" or re.match(regex, file_info["name"]): |
| # for folders we will dig deeper but not for files |
| if file_info["type"] == "folder": |
| links.append([int(file_info["name"]), file_info["url"]]) |
| else: |
| links.append([file_info["name"], file_info["name"]]) |
| |
| except urllib2.HTTPError,e: |
| print "ERROR: finding links for (%s): %s" % (url, e) |
| links = [] #return empty array |
| return links |
| |
def list_hwpack(url):
    ''' returns tuple of (buildate, url)

    Scans the hwpack tarballs listed under *url* and returns the first one
    carrying an embedded _<digits>- build date, or None when none does.
    '''
    # compiled once instead of per-iteration; raw string for the \d escape
    date_pattern = re.compile(r'_(\d+)-')
    for link in list_links(url, r'.*hwpack.*?\.tar\.gz'):
        dates = date_pattern.findall(link[1])
        # Previously a bare except: returned None on the FIRST link without
        # a date (and hid any other error); now we keep trying the rest.
        if dates:
            return (dates[0], '%s/%s' % (url, link[1]))
    return None
| |
def latest_hwpacks(url, limit=7):
    '''returns an array of tuples (build-date, hwpack url) like:
       [ (20120210, http://foo.bar/hwpack.tar.gz), (20120209, blah.tar.gz) ]
    '''
    # restrict the scan to the newest *limit* builds
    recent = sorted(list_links(url), reverse=True)[:limit]
    found = []
    for entry in recent:
        hwpack = list_hwpack('%s/%s' % (url, entry[0]))
        if hwpack is not None:
            found.append(hwpack)
    return found
| |
def list_rfs(url):
    """Return the URL of the first rootfs tarball under *url*, or None."""
    matches = list_links(url, r'.*(?!config)(?:rootfs\.)?tar\.gz')
    if not matches:
        return None
    return "%s/%s" % (url, matches[0][0])
| |
def latest_rfs(url, limit=7):
    '''
    Returns a tuple of (builddate, url)

    Walks the newest *limit* builds under *url* and returns the first one
    that contains a rootfs tarball; None when no build has one.
    '''
    # only analyze the last few builds, newest first
    for entry in sorted(list_links(url), reverse=True)[:limit]:
        rfs_url = list_rfs('%s/%s' % (url, entry[0]))
        if rfs_url is not None:
            return (entry[0], rfs_url)

    return None
| |
| if __name__ == '__main__': |
| cookie_setup() |
| |
| for arg in sys.argv[1:]: |
| print "HWPACKS for: %s" % arg |
| hwpacks = latest_hwpacks(arg, 4) |
| for hwpack in hwpacks: |
| print " %s: %s" % hwpack |
| |
| print "latest nano:" |
| print " %s %s" % latest_rfs('http://snapshots.linaro.org/ubuntu/images/nano') |