1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
|
#!/usr/bin/python
# Mark Patchwork patches as Accepted/Superseded once the matching commits
# have landed in a project's upstream git repository.
import fcntl
import logging
import os
import operator
import sys
from bin import django_setup, add_logging_arguments
django_setup() # must be called to get sys.path and django settings in place
# Django models are only importable after django_setup() above has run.
from patchwork.models import Project, State
import gitrepo
import patch_matcher
from importlib import import_module
log = logging.getLogger('update_commited_patches')
# Absolute directory containing this script.
_here = os.path.abspath(os.path.dirname(__file__))
def _assert_repo_dir(path):
if not os.path.exists(path):
os.mkdir(path)
def _update_commit(project, repo, commit, dryrun):
    """Update the state of every patch that matches *commit*.

    The newest matching patch becomes Accepted (and records the commit
    id); any older duplicates become Superseded.  When *dryrun* is true
    the new states are logged but never written to the database.
    """
    matches = patch_matcher.get_patches_matching_commit(project, repo, commit)
    accepted = State.objects.get(name='Accepted')
    superseded = State.objects.get(name='Superseded')
    # Newest first: the most recent submission wins the Accepted state.
    ordered = sorted(matches, key=operator.attrgetter('date'), reverse=True)
    for idx, patch in enumerate(ordered):
        if idx:
            patch.state = superseded
        else:
            patch.state = accepted
            patch.commit_ref = commit.id
        log.info('Updating patch %s, commit: %s, state: %s',
                 patch, commit.id, patch.state.name)
        if not dryrun:
            patch.save()
def _update_project(cb, repo_dir, project, commits, dryrun):
    """Scan *project*'s git repository and update its patch states.

    When *commits* is non-empty only those commit ids are examined;
    otherwise every not-yet-checked commit in the repository is
    processed.  *cb*, if not None, is invoked as
    cb(project, repo, commit, dryrun) after each commit is handled.
    Failures on individual commits are logged and skipped.
    """
    log.info('Checking for updates to %s', project.linkname)
    repo = gitrepo.Repo(repo_dir, project.linkname, project.scm_url)
    repo.update()
    if commits:
        to_check = [repo[cid] for cid in commits]
    else:
        # Only persist the "already checked" bookmark on a real run;
        # `is False` mirrors the original exact comparison semantics.
        to_check = repo.process_unchecked_commits(dryrun is False)
    for commit in to_check:
        log.debug('check commit: %s', commit.id)
        try:
            _update_commit(project, repo, commit, dryrun)
            if cb:
                cb(project, repo, commit, dryrun)
        except MemoryError as e:
            log.error('Unable to process commit(%s) because of size: %s',
                      commit.id, e)
        except Exception as e:
            log.error('Unable to process commit(%s): %s', commit.id, e)
def get_commit_callback_constructor():
    """Return the callable named by settings.UPDATE_COMMIT_CALLBACK.

    The setting value is a "dotted.module.path:attr" string; the named
    attribute (a context-manager constructor) is returned, or None when
    the setting is absent or empty.

    Bug fix: this function previously relied on a ``settings`` name that
    was imported only inside the ``if __name__ == '__main__'`` guard, so
    calling it from an importing module raised NameError.  Import the
    Django settings locally instead.
    """
    from django.conf import settings
    spec = getattr(settings, 'UPDATE_COMMIT_CALLBACK', None)
    if not spec:
        return None
    module_path, attr_name = spec.rsplit(':', 1)
    module = import_module(module_path)
    return getattr(module, attr_name)
if __name__ == '__main__':
    import argparse
    from django.conf import settings
    parser = argparse.ArgumentParser(
        description='Find patches that have been committed upstream')
    parser.add_argument('--dryrun', action='store_true',
                        help='Run through without changing anything in the DB')
    add_logging_arguments(parser)
    parser.add_argument('project', nargs='?',
                        help='only check on specific project')
    parser.add_argument('commit_id', nargs='*',
                        help='only check on given commit(s) for a project')
    args = parser.parse_args()
    _assert_repo_dir(settings.REPO_DIR)
    # Ensure no other copy of this script is running.  The file handle is
    # deliberately kept open (never closed) so the flock is held for the
    # life of the process.
    f = open(os.path.join(settings.REPO_DIR, '.lock'), 'w+')
    try:
        fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        sys.exit('Script is already running')
    if args.project:
        projects = [Project.objects.get(linkname=args.project)]
    else:
        # Only projects with a configured SCM URL can be scanned.
        projects = Project.objects.filter(
            scm_url__isnull=False).exclude(scm_url='')

    def _run_all(cb):
        # Single shared driver loop: the callback and no-callback paths
        # were previously duplicated verbatim and could drift apart.
        for p in projects:
            try:
                _update_project(
                    cb, settings.REPO_DIR, p, args.commit_id, args.dryrun)
            except Exception:
                log.exception('Error updating commits for: %s', p)

    cb_constructor = get_commit_callback_constructor()
    if cb_constructor:
        with cb_constructor() as cb:
            _run_all(cb)
    else:
        _run_all(None)
|