- # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
- # Use of this source code is governed by a BSD-style license that can be
- # found in the LICENSE file.
- """
- Utilities for requesting information for a gerrit server via https.
- https://gerrit-review.googlesource.com/Documentation/rest-api.html
- """
- import base64
- import contextlib
- import cookielib
- import httplib # Still used for its constants.
- import json
- import logging
- import netrc
- import os
- import random
- import re
- import socket
- import stat
- import sys
- import tempfile
- import time
- import urllib
- import urlparse
- from cStringIO import StringIO
- from multiprocessing.pool import ThreadPool
- import auth
- import gclient_utils
- import metrics
- import metrics_utils
- import subprocess2
- from third_party import httplib2
- LOGGER = logging.getLogger()
- # With a starting sleep time of 1.5 seconds, 2^n exponential backoff, and seven
- # total tries, the total sleep time between the first and last try is 94.5 sec.
- # TODO(crbug.com/881860): Lower this when crbug.com/877717 is fixed.
- TRY_LIMIT = 7
- # Controls the transport protocol used to communicate with gerrit.
- # This is parameterized primarily to enable GerritTestCase.
- GERRIT_PROTOCOL = 'https'
- # TODO(crbug.com/881860): Remove.
- GERRIT_ERR_LOGGER = logging.getLogger('GerritErrorLogs')
- GERRIT_ERR_LOG_FILE = os.path.join(tempfile.gettempdir(), 'GerritHeaders.txt')
- GERRIT_ERR_MESSAGE = (
- 'If you see this when running \'git cl upload\', please report this to '
- 'https://crbug.com/881860, and attach the failures in %s.\n' %
- GERRIT_ERR_LOG_FILE)
- INTERESTING_HEADERS = frozenset([
- 'x-google-backends',
- 'x-google-errorfiltertrace',
- 'x-google-filter-grace',
- 'x-errorid',
- ])
- class GerritError(Exception):
- """Exception class for errors commuicating with the gerrit-on-borg service."""
- def __init__(self, http_status, *args, **kwargs):
- super(GerritError, self).__init__(*args, **kwargs)
- self.http_status = http_status
- self.message = '(%d) %s' % (self.http_status, self.message)
- class GerritAuthenticationError(GerritError):
- """Exception class for authentication errors during Gerrit communication."""
- def _QueryString(params, first_param=None):
- """Encodes query parameters in the key:val[+key:val...] format specified here:
- https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
- """
- q = [urllib.quote(first_param)] if first_param else []
- q.extend(['%s:%s' % (key, val) for key, val in params])
- return '+'.join(q)
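- # Illustrative sketch (not part of the upstream API): given hypothetical
- # search terms, _QueryString joins them into the '+'-separated Gerrit query:
- #
- #   _QueryString([('is', 'open'), ('owner', 'self')], first_param='12345')
- #   # -> '12345+is:open+owner:self'
- #
- # Only first_param is URL-quoted; the key/value terms are passed through as-is.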
- def GetConnectionObject(protocol=None):
- if protocol is None:
- protocol = GERRIT_PROTOCOL
- if protocol in ('http', 'https'):
- return httplib2.Http()
- else:
- raise RuntimeError(
- "Don't know how to work with protocol '%s'" % protocol)
- class Authenticator(object):
- """Base authenticator class for authenticator implementations to subclass."""
- def get_auth_header(self, host):
- raise NotImplementedError()
- @staticmethod
- def get():
- """Returns: (Authenticator) The identified Authenticator to use.
- Probes the local system and its environment and identifies the
- Authenticator instance to use.
- """
- # LUCI Context takes priority since it's normally present only on bots,
- # which then must use it.
- if LuciContextAuthenticator.is_luci():
- return LuciContextAuthenticator()
- if GceAuthenticator.is_gce():
- return GceAuthenticator()
- return CookiesAuthenticator()
- class CookiesAuthenticator(Authenticator):
- """Authenticator implementation that uses ".netrc" or ".gitcookies" for token.
- Expected case for developer workstations.
- """
- _EMPTY = object()
- def __init__(self):
- # Credentials are loaded lazily on first use. This ensures that
- # Authenticator.get() can always construct an authenticator, even if
- # something is broken. This allows 'creds-check' to proceed to actually
- # check the creds later, rigorously (instead of blowing up with a cryptic
- # error if they are wrong).
- self._netrc = self._EMPTY
- self._gitcookies = self._EMPTY
- @property
- def netrc(self):
- if self._netrc is self._EMPTY:
- self._netrc = self._get_netrc()
- return self._netrc
- @property
- def gitcookies(self):
- if self._gitcookies is self._EMPTY:
- self._gitcookies = self._get_gitcookies()
- return self._gitcookies
- @classmethod
- def get_new_password_url(cls, host):
- assert not host.startswith('http')
- # Assume *.googlesource.com pattern.
- parts = host.split('.')
- if not parts[0].endswith('-review'):
- parts[0] += '-review'
- return 'https://%s/new-password' % ('.'.join(parts))
- @classmethod
- def get_new_password_message(cls, host):
- url = cls.get_new_password_url(host)
- return 'You can (re)generate your credentials by visiting %s' % url
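- # For example, get_new_password_url('chromium.googlesource.com') returns
- # 'https://chromium-review.googlesource.com/new-password'; a host whose first
- # label already ends in '-review' is used unchanged.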
- @classmethod
- def get_netrc_path(cls):
- path = '_netrc' if sys.platform.startswith('win') else '.netrc'
- return os.path.expanduser(os.path.join('~', path))
- @classmethod
- def _get_netrc(cls):
- # Buffer the '.netrc' path. Use an empty file if it doesn't exist.
- path = cls.get_netrc_path()
- if not os.path.exists(path):
- return netrc.netrc(os.devnull)
- st = os.stat(path)
- if st.st_mode & (stat.S_IRWXG | stat.S_IRWXO):
- print >> sys.stderr, (
- 'WARNING: netrc file %s cannot be used because its file '
- 'permissions are insecure. netrc file permissions should be '
- '600.' % path)
- with open(path) as fd:
- content = fd.read()
- # Load the '.netrc' file. We strip comments from it because processing them
- # can trigger a bug in Windows. See crbug.com/664664.
- content = '\n'.join(l for l in content.splitlines()
- if l.strip() and not l.strip().startswith('#'))
- with tempdir() as tdir:
- netrc_path = os.path.join(tdir, 'netrc')
- with open(netrc_path, 'w') as fd:
- fd.write(content)
- os.chmod(netrc_path, (stat.S_IRUSR | stat.S_IWUSR))
- return cls._get_netrc_from_path(netrc_path)
- @classmethod
- def _get_netrc_from_path(cls, path):
- try:
- return netrc.netrc(path)
- except IOError:
- print >> sys.stderr, 'WARNING: Could not read netrc file %s' % path
- return netrc.netrc(os.devnull)
- except netrc.NetrcParseError as e:
- print >> sys.stderr, ('ERROR: Cannot use netrc file %s due to a '
- 'parsing error: %s' % (path, e))
- return netrc.netrc(os.devnull)
- @classmethod
- def get_gitcookies_path(cls):
- if os.getenv('GIT_COOKIES_PATH'):
- return os.getenv('GIT_COOKIES_PATH')
- try:
- return subprocess2.check_output(
- ['git', 'config', '--path', 'http.cookiefile']).strip()
- except subprocess2.CalledProcessError:
- return os.path.join(os.environ['HOME'], '.gitcookies')
- @classmethod
- def _get_gitcookies(cls):
- gitcookies = {}
- path = cls.get_gitcookies_path()
- if not os.path.exists(path):
- return gitcookies
- try:
- f = open(path, 'rb')
- except IOError:
- return gitcookies
- with f:
- for line in f:
- try:
- fields = line.strip().split('\t')
- if line.strip().startswith('#') or len(fields) != 7:
- continue
- domain, xpath, key, value = fields[0], fields[2], fields[5], fields[6]
- if xpath == '/' and key == 'o':
- if value.startswith('git-'):
- login, secret_token = value.split('=', 1)
- gitcookies[domain] = (login, secret_token)
- else:
- gitcookies[domain] = ('', value)
- except (IndexError, ValueError, TypeError) as exc:
- LOGGER.warning(exc)
- return gitcookies
- def _get_auth_for_host(self, host):
- for domain, creds in self.gitcookies.iteritems():
- if cookielib.domain_match(host, domain):
- return (creds[0], None, creds[1])
- return self.netrc.authenticators(host)
- def get_auth_header(self, host):
- a = self._get_auth_for_host(host)
- if a:
- if a[0]:
- return 'Basic %s' % (base64.b64encode('%s:%s' % (a[0], a[2])))
- else:
- return 'Bearer %s' % a[2]
- return None
- def get_auth_email(self, host):
- """Best effort parsing of email to be used for auth for the given host."""
- a = self._get_auth_for_host(host)
- if not a:
- return None
- login = a[0]
- # login typically looks like 'git-xxx.example.com'
- if not login.startswith('git-') or '.' not in login:
- return None
- username, domain = login[len('git-'):].split('.', 1)
- return '%s@%s' % (username, domain)
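- # Illustrative example (placeholder account): a gitcookies login of
- # 'git-jdoe.example.com' parses to username 'jdoe' and domain 'example.com',
- # so get_auth_email() returns 'jdoe@example.com'. Logins that don't match the
- # 'git-<user>.<domain>' pattern yield None.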
- # Backwards compatibility just in case somebody imports this outside of
- # depot_tools.
- NetrcAuthenticator = CookiesAuthenticator
- class GceAuthenticator(Authenticator):
- """Authenticator implementation that uses GCE metadata service for token.
- """
- _INFO_URL = 'http://metadata.google.internal'
- _ACQUIRE_URL = ('%s/computeMetadata/v1/instance/'
- 'service-accounts/default/token' % _INFO_URL)
- _ACQUIRE_HEADERS = {"Metadata-Flavor": "Google"}
- _cache_is_gce = None
- _token_cache = None
- _token_expiration = None
- @classmethod
- def is_gce(cls):
- if os.getenv('SKIP_GCE_AUTH_FOR_GIT'):
- return False
- if cls._cache_is_gce is None:
- cls._cache_is_gce = cls._test_is_gce()
- return cls._cache_is_gce
- @classmethod
- def _test_is_gce(cls):
- # Based on https://cloud.google.com/compute/docs/metadata#runninggce
- try:
- resp, _ = cls._get(cls._INFO_URL)
- except (socket.error, httplib2.ServerNotFoundError,
- httplib2.socks.HTTPError):
- # Could not resolve URL.
- return False
- return resp.get('metadata-flavor') == 'Google'
- @staticmethod
- def _get(url, **kwargs):
- next_delay_sec = 1
- for i in xrange(TRY_LIMIT):
- p = urlparse.urlparse(url)
- c = GetConnectionObject(protocol=p.scheme)
- resp, contents = c.request(url, 'GET', **kwargs)
- LOGGER.debug('GET [%s] #%d/%d (%d)', url, i+1, TRY_LIMIT, resp.status)
- if resp.status < httplib.INTERNAL_SERVER_ERROR:
- return (resp, contents)
- # Retry server error status codes.
- LOGGER.warn('Encountered server error')
- if TRY_LIMIT - i > 1:
- LOGGER.info('Will retry in %d seconds (%d more times)...',
- next_delay_sec, TRY_LIMIT - i - 1)
- time.sleep(next_delay_sec)
- next_delay_sec *= 2
- @classmethod
- def _get_token_dict(cls):
- if cls._token_cache:
- # Reuse the cached token only while it still has more than 25 seconds of
- # validity left; otherwise fall through and fetch a fresh one.
- if cls._token_expiration > time.time() + 25:
- return cls._token_cache
- resp, contents = cls._get(cls._ACQUIRE_URL, headers=cls._ACQUIRE_HEADERS)
- if resp.status != httplib.OK:
- return None
- cls._token_cache = json.loads(contents)
- cls._token_expiration = cls._token_cache['expires_in'] + time.time()
- return cls._token_cache
- def get_auth_header(self, _host):
- token_dict = self._get_token_dict()
- if not token_dict:
- return None
- return '%(token_type)s %(access_token)s' % token_dict
- class LuciContextAuthenticator(Authenticator):
- """Authenticator implementation that uses LUCI_CONTEXT ambient local auth.
- """
- @staticmethod
- def is_luci():
- return auth.has_luci_context_local_auth()
- def __init__(self):
- self._access_token = None
- self._ensure_fresh()
- def _ensure_fresh(self):
- if not self._access_token or self._access_token.needs_refresh():
- self._access_token = auth.get_luci_context_access_token(
- scopes=' '.join([auth.OAUTH_SCOPE_EMAIL, auth.OAUTH_SCOPE_GERRIT]))
- def get_auth_header(self, _host):
- self._ensure_fresh()
- return 'Bearer %s' % self._access_token.token
- def CreateHttpConn(host, path, reqtype='GET', headers=None, body=None):
- """Opens an https connection to a gerrit service, and sends a request."""
- headers = headers or {}
- bare_host = host.partition(':')[0]
- a = Authenticator.get().get_auth_header(bare_host)
- if a:
- headers.setdefault('Authorization', a)
- else:
- LOGGER.debug('No authorization found for %s.' % bare_host)
- url = path
- if not url.startswith('/'):
- url = '/' + url
- if 'Authorization' in headers and not url.startswith('/a/'):
- url = '/a%s' % url
- if body:
- body = json.JSONEncoder().encode(body)
- headers.setdefault('Content-Type', 'application/json')
- if LOGGER.isEnabledFor(logging.DEBUG):
- LOGGER.debug('%s %s://%s%s' % (reqtype, GERRIT_PROTOCOL, host, url))
- for key, val in headers.iteritems():
- if key == 'Authorization':
- val = 'HIDDEN'
- LOGGER.debug('%s: %s' % (key, val))
- if body:
- LOGGER.debug(body)
- conn = GetConnectionObject()
- # HACK: httplib2.Http objects have no such attribute; we store req_host here
- # for later use in ReadHttpResponse.
- conn.req_host = host
- conn.req_params = {
- 'uri': urlparse.urljoin('%s://%s' % (GERRIT_PROTOCOL, host), url),
- 'method': reqtype,
- 'headers': headers,
- 'body': body,
- }
- return conn
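- # Typical usage in this module pairs CreateHttpConn with one of the
- # ReadHttp*Response helpers below (host and change number are placeholders):
- #
- #   conn = CreateHttpConn('gerrit-review.googlesource.com', 'changes/12345')
- #   data = ReadHttpJsonResponse(conn)
- #
- # Note the request is not actually sent until ReadHttpResponse() invokes
- # conn.request(**conn.req_params).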
- def ReadHttpResponse(conn, accept_statuses=frozenset([200])):
- """Reads an http response from a connection into a string buffer.
- Args:
- conn: An Http object created by CreateHttpConn above.
- accept_statuses: Treat any of these statuses as success. Default: [200]
- Common additions include 204, 400, and 404.
- Returns: A string buffer containing the connection's reply.
- """
- sleep_time = 1.5
- failed = False
- for idx in range(TRY_LIMIT):
- before_response = time.time()
- response, contents = conn.request(**conn.req_params)
- response_time = time.time() - before_response
- metrics.collector.add_repeated(
- 'http_requests',
- metrics_utils.extract_http_metrics(
- conn.req_params['uri'], conn.req_params['method'], response.status,
- response_time))
- # Check if this is an authentication issue.
- www_authenticate = response.get('www-authenticate')
- if (response.status in (httplib.UNAUTHORIZED, httplib.FOUND) and
- www_authenticate):
- auth_match = re.search('realm="([^"]+)"', www_authenticate, re.I)
- host = auth_match.group(1) if auth_match else conn.req_host
- reason = ('Authentication failed. Please make sure your .gitcookies file '
- 'has credentials for %s' % host)
- raise GerritAuthenticationError(response.status, reason)
- # If response.status < 500 then the result is final; break retry loop.
- # If the response is 404/409, it might be because of replication lag, so
- # keep trying anyway.
- if ((response.status < 500 and response.status not in [404, 409])
- or response.status in accept_statuses):
- LOGGER.debug('got response %d for %s %s', response.status,
- conn.req_params['method'], conn.req_params['uri'])
- # If 404 was in accept_statuses, then it's expected that the file might
- # not exist, so don't return the gitiles error page because that's not the
- # "content" that was actually requested.
- if response.status == 404:
- contents = ''
- break
- if response.status == 404:
- # TODO(crbug/881860): remove this hack.
- # HACK: try a different Gerrit mirror as a workaround for hitting a
- # potentially out-of-date mirror through the default routing.
- if conn.req_host == 'chromium-review.googlesource.com':
- conn.req_params['uri'] = _UseGerritMirror(
- conn.req_params['uri'], 'chromium-review.googlesource.com')
- # Don't increase sleep_time in this case, since we suspect we've just
- # asked the wrong Gerrit mirror.
- sleep_time /= 2.0
- failed = True
- rpc_headers = '\n'.join(
- ' ' + header + ': ' + value
- for header, value in response.iteritems()
- if header.lower() in INTERESTING_HEADERS
- )
- GERRIT_ERR_LOGGER.info(
- 'Gerrit RPC failure headers:\n'
- ' Host: %s\n'
- ' Ip: %s\n'
- '%s\n',
- conn.connections.values()[0].host,
- conn.connections.values()[0].sock.getpeername(),
- rpc_headers)
- else:
- # A status >=500 is assumed to be a possible transient error; retry.
- http_version = 'HTTP/%s' % ('1.1' if response.version == 11 else '1.0')
- LOGGER.warn('A transient error occurred while querying %s:\n'
- '%s %s %s\n'
- '%s %d %s',
- conn.req_host, conn.req_params['method'],
- conn.req_params['uri'],
- http_version, http_version, response.status, response.reason)
- if TRY_LIMIT - idx > 1:
- LOGGER.info('Will retry in %d seconds (%d more times)...',
- sleep_time, TRY_LIMIT - idx - 1)
- time.sleep(sleep_time)
- sleep_time = sleep_time * 2
- # end of retries loop
- if failed:
- LOGGER.warn(GERRIT_ERR_MESSAGE)
- if response.status not in accept_statuses:
- if response.status in (401, 403):
- print('Your Gerrit credentials might be misconfigured. Try: \n'
- ' git cl creds-check')
- reason = '%s: %s' % (response.reason, contents)
- if failed:
- reason += '\n' + GERRIT_ERR_MESSAGE
- raise GerritError(response.status, reason)
- return StringIO(contents)
- def ReadHttpJsonResponse(conn, accept_statuses=frozenset([200])):
- """Parses an https response as json."""
- fh = ReadHttpResponse(conn, accept_statuses)
- # The first line of the response should always be: )]}'
- s = fh.readline()
- if s and s.rstrip() != ")]}'":
- raise GerritError(200, 'Unexpected json output: %s' % s)
- s = fh.read()
- if not s:
- return None
- return json.loads(s)
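- # Gerrit prefixes every JSON response with the XSSI-protection line ")]}'",
- # so a raw body such as:
- #
- #   )]}'
- #   {"status": "NEW"}
- #
- # is returned here as the parsed dict (e.g. {'status': 'NEW'}), while an empty
- # body (e.g. from an accepted 204 or 404) yields None.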
- def QueryChanges(host, params, first_param=None, limit=None, o_params=None,
- start=None):
- """
- Queries a gerrit-on-borg server for changes matching query terms.
- Args:
- params: A list of key:value pairs for search parameters, as documented
- here (e.g. ('is', 'owner') for a parameter 'is:owner'):
- https://gerrit-review.googlesource.com/Documentation/user-search.html#search-operators
- first_param: A change identifier
- limit: Maximum number of results to return.
- start: how many changes to skip (starting with the most recent)
- o_params: A list of additional output specifiers, as documented here:
- https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
- Returns:
- A list of json-decoded query results.
- """
- # Note that no attempt is made to escape special characters; YMMV.
- if not params and not first_param:
- raise RuntimeError('QueryChanges requires search parameters')
- path = 'changes/?q=%s' % _QueryString(params, first_param)
- if start:
- path = '%s&start=%s' % (path, start)
- if limit:
- path = '%s&n=%d' % (path, limit)
- if o_params:
- path = '%s&%s' % (path, '&'.join(['o=%s' % p for p in o_params]))
- return ReadHttpJsonResponse(CreateHttpConn(host, path))
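- # Illustrative usage (host and owner are placeholders): fetch up to 10 open
- # changes owned by a user, including current-revision information:
- #
- #   QueryChanges('chromium-review.googlesource.com',
- #                [('owner', 'someone@example.com'), ('status', 'open')],
- #                limit=10, o_params=['CURRENT_REVISION'])
- #
- # This builds the request path
- #   changes/?q=owner:someone@example.com+status:open&n=10&o=CURRENT_REVISION
- # (CreateHttpConn adds the leading '/' and an '/a' prefix when credentials
- # are present).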
- def GenerateAllChanges(host, params, first_param=None, limit=500,
- o_params=None, start=None):
- """
- Queries a gerrit-on-borg server for all the changes matching the query terms.
- WARNING: this is unreliable if a change matching the query is modified while
- this function is being called.
- A single query to gerrit-on-borg is limited in the number of results by the
- limit parameter on the request (see QueryChanges) and by the server's maximum
- limit.
- Args:
- params, first_param: Refer to QueryChanges().
- limit: Maximum number of requested changes per query.
- o_params: Refer to QueryChanges().
- start: Refer to QueryChanges().
- Returns:
- A generator object to the list of returned changes.
- """
- already_returned = set()
- def at_most_once(cls):
- for cl in cls:
- if cl['_number'] not in already_returned:
- already_returned.add(cl['_number'])
- yield cl
- start = start or 0
- cur_start = start
- more_changes = True
- while more_changes:
- # This will fetch changes[start..start+limit] sorted by most recently
- # updated. Since the rank of any change in this list can change at any time
- # (say, a user posts a comment), subsequent calls may overlap like this:
- # > initial order ABCDEFGH
- # query[0..3] => ABC
- # > E gets updated. New order: EABCDFGH
- # query[3..6] => CDF # C is a dup
- # query[6..9] => GH # E is missed.
- page = QueryChanges(host, params, first_param, limit, o_params,
- cur_start)
- for cl in at_most_once(page):
- yield cl
- more_changes = [cl for cl in page if '_more_changes' in cl]
- if len(more_changes) > 1:
- raise GerritError(
- 200,
- 'Received %d changes with a _more_changes attribute set but should '
- 'receive at most one.' % len(more_changes))
- if more_changes:
- cur_start += len(page)
- # If we paged through, query the first page again; in most circumstances this
- # will fetch any changes that were modified while this function was running.
- if start != cur_start:
- page = QueryChanges(host, params, first_param, limit, o_params, start)
- for cl in at_most_once(page):
- yield cl
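- # Illustrative usage (placeholders): iterate over every change matching the
- # query without worrying about server-side paging:
- #
- #   for cl in GenerateAllChanges('chromium-review.googlesource.com',
- #                                [('project', 'chromium/src'),
- #                                 ('status', 'open')]):
- #     print cl['_number']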
- def MultiQueryChanges(host, params, change_list, limit=None, o_params=None,
- start=None):
- """Initiate a query composed of multiple sets of query parameters."""
- if not change_list:
- raise RuntimeError(
- "MultiQueryChanges requires a list of change numbers/id's")
- q = ['q=%s' % '+OR+'.join([urllib.quote(str(x)) for x in change_list])]
- if params:
- q.append(_QueryString(params))
- if limit:
- q.append('n=%d' % limit)
- if start:
- q.append('S=%s' % start)
- if o_params:
- q.extend(['o=%s' % p for p in o_params])
- path = 'changes/?%s' % '&'.join(q)
- try:
- result = ReadHttpJsonResponse(CreateHttpConn(host, path))
- except GerritError as e:
- msg = '%s:\n%s' % (e.message, path)
- raise GerritError(e.http_status, msg)
- return result
- def GetGerritFetchUrl(host):
- """Given a gerrit host name returns URL of a gerrit instance to fetch from."""
- return '%s://%s/' % (GERRIT_PROTOCOL, host)
- def GetChangePageUrl(host, change_number):
- """Given a gerrit host name and change number, return change page url."""
- return '%s://%s/#/c/%d/' % (GERRIT_PROTOCOL, host, change_number)
- def GetChangeUrl(host, change):
- """Given a gerrit host name and change id, return an url for the change."""
- return '%s://%s/a/changes/%s' % (GERRIT_PROTOCOL, host, change)
- def GetChange(host, change):
- """Query a gerrit server for information about a single change."""
- path = 'changes/%s' % change
- return ReadHttpJsonResponse(CreateHttpConn(host, path))
- def GetChangeDetail(host, change, o_params=None):
- """Query a gerrit server for extended information about a single change."""
- path = 'changes/%s/detail' % change
- if o_params:
- path += '?%s' % '&'.join(['o=%s' % p for p in o_params])
- return ReadHttpJsonResponse(CreateHttpConn(host, path))
- def GetChangeCommit(host, change, revision='current'):
- """Query a gerrit server for a revision associated with a change."""
- path = 'changes/%s/revisions/%s/commit?links' % (change, revision)
- return ReadHttpJsonResponse(CreateHttpConn(host, path))
- def GetChangeCurrentRevision(host, change):
- """Get information about the latest revision for a given change."""
- return QueryChanges(host, [], change, o_params=('CURRENT_REVISION',))
- def GetChangeRevisions(host, change):
- """Get information about all revisions associated with a change."""
- return QueryChanges(host, [], change, o_params=('ALL_REVISIONS',))
- def GetChangeReview(host, change, revision=None):
- """Get the current review information for a change."""
- if not revision:
- jmsg = GetChangeRevisions(host, change)
- if not jmsg:
- return None
- elif len(jmsg) > 1:
- raise GerritError(200, 'Multiple changes found for ChangeId %s.' % change)
- revision = jmsg[0]['current_revision']
- path = 'changes/%s/revisions/%s/review' % (change, revision)
- return ReadHttpJsonResponse(CreateHttpConn(host, path))
- def GetChangeComments(host, change):
- """Get the line- and file-level comments on a change."""
- path = 'changes/%s/comments' % change
- return ReadHttpJsonResponse(CreateHttpConn(host, path))
- def AbandonChange(host, change, msg=''):
- """Abandon a gerrit change."""
- path = 'changes/%s/abandon' % change
- body = {'message': msg} if msg else {}
- conn = CreateHttpConn(host, path, reqtype='POST', body=body)
- return ReadHttpJsonResponse(conn)
- def RestoreChange(host, change, msg=''):
- """Restore a previously abandoned change."""
- path = 'changes/%s/restore' % change
- body = {'message': msg} if msg else {}
- conn = CreateHttpConn(host, path, reqtype='POST', body=body)
- return ReadHttpJsonResponse(conn)
- def SubmitChange(host, change, wait_for_merge=True):
- """Submits a gerrit change via Gerrit."""
- path = 'changes/%s/submit' % change
- body = {'wait_for_merge': wait_for_merge}
- conn = CreateHttpConn(host, path, reqtype='POST', body=body)
- return ReadHttpJsonResponse(conn)
- def HasPendingChangeEdit(host, change):
- conn = CreateHttpConn(host, 'changes/%s/edit' % change)
- try:
- ReadHttpResponse(conn)
- except GerritError as e:
- # 204 No Content means no pending change.
- if e.http_status == 204:
- return False
- raise
- return True
- def DeletePendingChangeEdit(host, change):
- conn = CreateHttpConn(host, 'changes/%s/edit' % change, reqtype='DELETE')
- # On success, gerrit returns status 204; if the edit was already deleted it
- # returns 404. Anything else is an error.
- ReadHttpResponse(conn, accept_statuses=[204, 404])
- def SetCommitMessage(host, change, description, notify='ALL'):
- """Updates a commit message."""
- assert notify in ('ALL', 'NONE')
- path = 'changes/%s/message' % change
- body = {'message': description, 'notify': notify}
- conn = CreateHttpConn(host, path, reqtype='PUT', body=body)
- try:
- ReadHttpResponse(conn, accept_statuses=[200, 204])
- except GerritError as e:
- raise GerritError(
- e.http_status,
- 'Received unexpected http status while editing message '
- 'in change %s' % change)
- def GetReviewers(host, change):
- """Get information about all reviewers attached to a change."""
- path = 'changes/%s/reviewers' % change
- return ReadHttpJsonResponse(CreateHttpConn(host, path))
- def GetReview(host, change, revision):
- """Get review information about a specific revision of a change."""
- path = 'changes/%s/revisions/%s/review' % (change, revision)
- return ReadHttpJsonResponse(CreateHttpConn(host, path))
- def AddReviewers(host, change, reviewers=None, ccs=None, notify=True,
- accept_statuses=frozenset([200, 400, 422])):
- """Add reviewers to a change."""
- if not reviewers and not ccs:
- return None
- if not change:
- return None
- reviewers = frozenset(reviewers or [])
- ccs = frozenset(ccs or [])
- path = 'changes/%s/revisions/current/review' % change
- body = {
- 'drafts': 'KEEP',
- 'reviewers': [],
- 'notify': 'ALL' if notify else 'NONE',
- }
- for r in sorted(reviewers | ccs):
- state = 'REVIEWER' if r in reviewers else 'CC'
- body['reviewers'].append({
- 'reviewer': r,
- 'state': state,
- 'notify': 'NONE', # We handled `notify` argument above.
- })
- conn = CreateHttpConn(host, path, reqtype='POST', body=body)
- # Gerrit will return 400 if one or more of the requested reviewers are
- # unprocessable. We read the response object to see which were rejected,
- # warn about them, and retry with the remainder.
- resp = ReadHttpJsonResponse(conn, accept_statuses=accept_statuses)
- errored = set()
- for result in resp.get('reviewers', {}).itervalues():
- r = result.get('input')
- state = 'REVIEWER' if r in reviewers else 'CC'
- if result.get('error'):
- errored.add(r)
- LOGGER.warn('Note: "%s" not added as a %s' % (r, state.lower()))
- if errored:
- # Try again, adding only those that didn't fail, and only accepting 200.
- AddReviewers(host, change, reviewers=(reviewers-errored),
- ccs=(ccs-errored), notify=notify, accept_statuses=[200])
- def RemoveReviewers(host, change, remove=None):
- """Remove reveiewers from a change."""
- if not remove:
- return
- if isinstance(remove, basestring):
- remove = (remove,)
- for r in remove:
- path = 'changes/%s/reviewers/%s' % (change, r)
- conn = CreateHttpConn(host, path, reqtype='DELETE')
- try:
- ReadHttpResponse(conn, accept_statuses=[204])
- except GerritError as e:
- raise GerritError(
- e.http_status,
- 'Received unexpected http status while deleting reviewer "%s" '
- 'from change %s' % (r, change))
- def SetReview(host, change, msg=None, labels=None, notify=None, ready=None):
- """Set labels and/or add a message to a code review."""
- if not msg and not labels:
- return
- path = 'changes/%s/revisions/current/review' % change
- body = {'drafts': 'KEEP'}
- if msg:
- body['message'] = msg
- if labels:
- body['labels'] = labels
- if notify is not None:
- body['notify'] = 'ALL' if notify else 'NONE'
- if ready:
- body['ready'] = True
- conn = CreateHttpConn(host, path, reqtype='POST', body=body)
- response = ReadHttpJsonResponse(conn)
- if labels:
- for key, val in labels.iteritems():
- if ('labels' not in response or key not in response['labels'] or
- int(response['labels'][key]) != int(val)):
- raise GerritError(200, 'Unable to set "%s" label on change %s.' % (
- key, change))
- def ResetReviewLabels(host, change, label, value='0', message=None,
- notify=None):
- """Reset the value of a given label for all reviewers on a change."""
- # This is tricky, because we want to work on the "current revision", but
- # there's always the risk that "current revision" will change in between
- # API calls. So, we check "current revision" at the beginning and end; if
- # it has changed, raise an exception.
- jmsg = GetChangeCurrentRevision(host, change)
- if not jmsg:
- raise GerritError(
- 200, 'Could not get review information for change "%s"' % change)
- value = str(value)
- revision = jmsg[0]['current_revision']
- path = 'changes/%s/revisions/%s/review' % (change, revision)
- message = message or (
- '%s label set to %s programmatically.' % (label, value))
- jmsg = GetReview(host, change, revision)
- if not jmsg:
- raise GerritError(200, 'Could not get review information for revision %s '
- 'of change %s' % (revision, change))
- for review in jmsg.get('labels', {}).get(label, {}).get('all', []):
- if str(review.get('value', value)) != value:
- body = {
- 'drafts': 'KEEP',
- 'message': message,
- 'labels': {label: value},
- 'on_behalf_of': review['_account_id'],
- }
- if notify:
- body['notify'] = notify
- conn = CreateHttpConn(
- host, path, reqtype='POST', body=body)
- response = ReadHttpJsonResponse(conn)
- if str(response['labels'][label]) != value:
- username = review.get('email', jmsg.get('name', ''))
- raise GerritError(200, 'Unable to set %s label for user "%s"'
- ' on change %s.' % (label, username, change))
- jmsg = GetChangeCurrentRevision(host, change)
- if not jmsg:
- raise GerritError(
- 200, 'Could not get review information for change "%s"' % change)
- elif jmsg[0]['current_revision'] != revision:
- raise GerritError(200, 'While resetting labels on change "%s", '
- 'a new patchset was uploaded.' % change)
- def CreateGerritBranch(host, project, branch, commit):
- """
- Create a new branch from given project and commit
- https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#create-branch
- Returns:
- A JSON with 'ref' key
- """
- path = 'projects/%s/branches/%s' % (project, branch)
- body = {'revision': commit}
- conn = CreateHttpConn(host, path, reqtype='PUT', body=body)
- response = ReadHttpJsonResponse(conn, accept_statuses=[201])
- if response:
- return response
- raise GerritError(200, 'Unable to create gerrit branch')
- def GetGerritBranch(host, project, branch):
- """
- Get a branch from given project and commit
- https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#get-branch
- Returns:
- A JSON object with 'revision' key
- """
- path = 'projects/%s/branches/%s' % (project, branch)
- conn = CreateHttpConn(host, path, reqtype='GET')
- response = ReadHttpJsonResponse(conn)
- if response:
- return response
- raise GerritError(200, 'Unable to get gerrit branch')
- def GetAccountDetails(host, account_id='self'):
- """Returns details of the account.
- If account_id is not given, uses the magic value 'self', which corresponds to
- whichever account the user is authenticating as.
- Documentation:
- https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#get-account
- Returns None if account is not found (i.e., Gerrit returned 404).
- """
- conn = CreateHttpConn(host, '/accounts/%s' % account_id)
- return ReadHttpJsonResponse(conn, accept_statuses=[200, 404])
- def ValidAccounts(host, accounts, max_threads=10):
- """Returns a mapping from valid account to its details.
- Invalid accounts, either non-existent or without a unique match,
- are not present as keys in the returned dictionary.
- """
- assert not isinstance(accounts, basestring), type(accounts)
- accounts = list(set(accounts))
- if not accounts:
- return {}
- def get_one(account):
- try:
- return account, GetAccountDetails(host, account)
- except GerritError:
- return None, None
- valid = {}
- with contextlib.closing(ThreadPool(min(max_threads, len(accounts)))) as pool:
- for account, details in pool.map(get_one, accounts):
- if account and details:
- valid[account] = details
- return valid
- def PercentEncodeForGitRef(original):
- """Apply percent-encoding for strings sent to gerrit via git ref metadata.
- The encoding used is based on but stricter than URL encoding (Section 2.1
- of RFC 3986). The only non-escaped characters are alphanumerics, and
- 'SPACE' (U+0020) can be represented as 'LOW LINE' (U+005F) or
- 'PLUS SIGN' (U+002B).
- For more information, see the Gerrit docs here:
- https://gerrit-review.googlesource.com/Documentation/user-upload.html#message
- """
- safe = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 '
- encoded = ''.join(c if c in safe else '%%%02X' % ord(c) for c in original)
- # spaces are not allowed in git refs; gerrit will interpret either '_' or
- # '+' (or '%20') as space. Use '_' since that has been supported the longest.
- return encoded.replace(' ', '_')
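- # For example, PercentEncodeForGitRef('Fix crash!') returns 'Fix_crash%21':
- # alphanumerics pass through, the space becomes '_', and '!' (U+0021) is
- # escaped to '%21'.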
- @contextlib.contextmanager
- def tempdir():
- tdir = None
- try:
- tdir = tempfile.mkdtemp(suffix='gerrit_util')
- yield tdir
- finally:
- if tdir:
- gclient_utils.rmtree(tdir)
- def ChangeIdentifier(project, change_number):
- """Returns change identifier "project~number" suitable for |chagne| arg of
- this module API.
- Such format is allows for more efficient Gerrit routing of HTTP requests,
- comparing to specifying just change_number.
- """
- assert int(change_number)
- return '%s~%s' % (urllib.quote(project, safe=''), change_number)
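- # For example, ChangeIdentifier('chromium/src', 3232117) returns
- # 'chromium%2Fsrc~3232117', which can be passed as the |change| argument of
- # the functions above. (The change number is purely illustrative.)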
- # TODO(crbug/881860): remove this hack.
- _GERRIT_MIRROR_PREFIXES = ['us1', 'us2', 'us3', 'eu1']
- assert all(3 == len(p) for p in _GERRIT_MIRROR_PREFIXES)
- def _UseGerritMirror(url, host):
- """Returns new url which uses randomly selected mirror for a gerrit host.
- url's host should be for a given host or a result of prior call to this
- function.
- Assumes url has a single occurence of the host substring.
- """
- assert host in url
- suffix = '-mirror-' + host
- prefixes = set(_GERRIT_MIRROR_PREFIXES)
- prefix_len = len(_GERRIT_MIRROR_PREFIXES[0])
- st = url.find(suffix)
- if st == -1:
- actual_host = host
- else:
- # Already uses some mirror.
- assert st >= prefix_len, (url, host, st, prefix_len)
- prefixes.remove(url[st-prefix_len:st])
- actual_host = url[st-prefix_len:st+len(suffix)]
- return url.replace(actual_host, random.choice(list(prefixes)) + suffix)
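- # For example, given 'https://chromium-review.googlesource.com/changes/123',
- # this returns something like
- # 'https://us1-mirror-chromium-review.googlesource.com/changes/123'; calling
- # it again on the result rotates to a different prefix from
- # _GERRIT_MIRROR_PREFIXES.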