View source

Add support for GCS deps

Also move the GCS calling logic out of download_from_google_storage.py
and into call_google_storage.py.

GCS deps look like:
   'src/third_party/node/linux': {
       'dep_type': 'gcs',
       'condition': 'checkout_linux',
       'bucket': 'chromium-nodejs/20.11.0',
       'object_name': '46795170ff5df9831955f163f6966abde581c8af',
       'sha256sum': '887504c37404898ca41b896f448ee6d7fc24179d8fb6a4b79d028ab7e1b7153d',
   },

   'src/third_party/llvm-build/Release+Asserts': {
       'dep_type': 'gcs',
       'condition': 'checkout_linux',
       'bucket': 'chromium-browser-clang',
       'object_name': 'Linux_x64/clang-llvmorg-18-init-17730-gf670112a-2.tar.xz',
       'sha256sum': '1e46df9b4e63c074064d75646310cb76be2f19815997a8486987189d80f991e8',
   },

Example directory for src/third_party/node/linux after gclient sync:
- tar_file.gz is the downloaded file from GCS.
- node_linux_x64/ is extracted in its path.
- `hash` contains the sha256 checksum of the downloaded GCS file.
```
chromium/src/ ->
   third_party/node/linux/ ->
       hash, tar_file.gz, node_linux_x64/
```

Bug: b/324418194
Change-Id: Ibcbbff27e211f194ddb8a08494af56570a84a12b
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/5299722
Commit-Queue: Stephanie Kim <kimstephanie@google.com>
Reviewed-by: Joanna Wang <jojwang@chromium.org>
Stephanie Kim, 1 year ago
Commit
3eedee7b55

+ 144 - 0
call_google_storage.py

@@ -0,0 +1,144 @@
+#!/usr/bin/env python3
+# Copyright (c) 2024 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Download files from Google Storage, given the bucket and file."""
+
+import optparse
+import os
+import re
+import sys
+import time
+
+import subprocess2
+
+# Env vars that tempdir can be gotten from; minimally, this
+# needs to match python's tempfile module and match normal
+# unix standards.
+_TEMPDIR_ENV_VARS = ('TMPDIR', 'TEMP', 'TMP')
+
+GSUTIL_DEFAULT_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+                                   'gsutil.py')
+
+# Maps sys.platform to what we actually want to call them.
+PLATFORM_MAPPING = {
+    'cygwin': 'win',
+    'darwin': 'mac',
+    'linux': 'linux',  # Python 3.3+.
+    'win32': 'win',
+    'aix6': 'aix',
+    'aix7': 'aix',
+    'zos': 'zos',
+}
+
+
+def GetNormalizedPlatform():
+    """Returns the result of sys.platform accounting for cygwin.
+    Under cygwin, this will always return "win32" like the native Python."""
+    if sys.platform == 'cygwin':
+        return 'win32'
+    return sys.platform
+
+
+# Common utilities
+class Gsutil(object):
+    """Call gsutil with some predefined settings.  This is a convenience object,
+    and is also immutable.
+
+    HACK: This object is used directly by the external script
+        `<depot_tools>/win_toolchain/get_toolchain_if_necessary.py`
+    """
+
+    MAX_TRIES = 5
+    RETRY_BASE_DELAY = 5.0
+    RETRY_DELAY_MULTIPLE = 1.3
+    VPYTHON3 = ('vpython3.bat'
+                if GetNormalizedPlatform() == 'win32' else 'vpython3')
+
+    def __init__(self, path, boto_path=None):
+        if not os.path.exists(path):
+            raise FileNotFoundError('GSUtil not found in %s' % path)
+        self.path = path
+        self.boto_path = boto_path
+
+    def get_sub_env(self):
+        env = os.environ.copy()
+        if self.boto_path == os.devnull:
+            env['AWS_CREDENTIAL_FILE'] = ''
+            env['BOTO_CONFIG'] = ''
+        elif self.boto_path:
+            env['AWS_CREDENTIAL_FILE'] = self.boto_path
+            env['BOTO_CONFIG'] = self.boto_path
+
+        if PLATFORM_MAPPING[sys.platform] != 'win':
+            env.update((x, "/tmp") for x in _TEMPDIR_ENV_VARS)
+
+        return env
+
+    def call(self, *args):
+        cmd = [self.VPYTHON3, self.path]
+        cmd.extend(args)
+        return subprocess2.call(cmd, env=self.get_sub_env())
+
+    def check_call(self, *args):
+        cmd = [self.VPYTHON3, self.path]
+        cmd.extend(args)
+        ((out, err), code) = subprocess2.communicate(cmd,
+                                                     stdout=subprocess2.PIPE,
+                                                     stderr=subprocess2.PIPE,
+                                                     env=self.get_sub_env())
+
+        out = out.decode('utf-8', 'replace')
+        err = err.decode('utf-8', 'replace')
+
+        # Parse output.
+        status_code_match = re.search('status=([0-9]+)', err)
+        if status_code_match:
+            return (int(status_code_match.group(1)), out, err)
+        if ('ServiceException: 401 Anonymous' in err):
+            return (401, out, err)
+        if ('You are attempting to access protected data with '
+                'no configured credentials.' in err):
+            return (403, out, err)
+        if 'matched no objects' in err or 'No URLs matched' in err:
+            return (404, out, err)
+        return (code, out, err)
+
+    def check_call_with_retries(self, *args):
+        delay = self.RETRY_BASE_DELAY
+        for i in range(self.MAX_TRIES):
+            code, out, err = self.check_call(*args)
+            if not code or i == self.MAX_TRIES - 1:
+                break
+
+            time.sleep(delay)
+            delay *= self.RETRY_DELAY_MULTIPLE
+
+        return code, out, err
+
+
+def main(args):
+    parser = optparse.OptionParser()
+    parser.add_option('-b',
+                      '--bucket',
+                      help='Google Storage bucket to fetch from.')
+    parser.add_option('-p', '--file', help='Path of file to fetch.')
+    parser.add_option('-o',
+                      '--output',
+                      help='Path where GCS contents should be downloaded.')
+    parser.add_option('-e', '--boto', help='Specify a custom boto file.')
+    (options, args) = parser.parse_args()
+
+    file_url = 'gs://%s/%s' % (options.bucket, options.file)
+
+    # Make sure gsutil exists where we expect it to.
+    if os.path.exists(GSUTIL_DEFAULT_PATH):
+        gsutil = Gsutil(GSUTIL_DEFAULT_PATH, boto_path=options.boto)
+    else:
+        parser.error('gsutil not found in %s, bad depot_tools checkout?' %
+                     GSUTIL_DEFAULT_PATH)
+
+    gsutil.check_call('cp', file_url, options.output)
+
+
+if __name__ == '__main__':
+    sys.exit(main(sys.argv))

+ 5 - 107
download_from_google_storage.py

@@ -18,25 +18,7 @@ import threading
 import time
 import time
 
 
 import subprocess2
 import subprocess2
-
-# Env vars that tempdir can be gotten from; minimally, this
-# needs to match python's tempfile module and match normal
-# unix standards.
-_TEMPDIR_ENV_VARS = ('TMPDIR', 'TEMP', 'TMP')
-
-GSUTIL_DEFAULT_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
-                                   'gsutil.py')
-# Maps sys.platform to what we actually want to call them.
-PLATFORM_MAPPING = {
-    'cygwin': 'win',
-    'darwin': 'mac',
-    'linux': 'linux',  # Python 3.3+.
-    'linux2': 'linux',  # Python < 3.3 uses "linux2" / "linux3".
-    'win32': 'win',
-    'aix6': 'aix',
-    'aix7': 'aix',
-    'zos': 'zos',
-}
+import call_google_storage
 
 
 
 
 class InvalidFileError(IOError):
 class InvalidFileError(IOError):
@@ -47,91 +29,6 @@ class InvalidPlatformError(Exception):
     pass
     pass
 
 
 
 
-def GetNormalizedPlatform():
-    """Returns the result of sys.platform accounting for cygwin.
-    Under cygwin, this will always return "win32" like the native Python."""
-    if sys.platform == 'cygwin':
-        return 'win32'
-    return sys.platform
-
-
-# Common utilities
-class Gsutil(object):
-    """Call gsutil with some predefined settings.  This is a convenience object,
-    and is also immutable.
-
-    HACK: This object is used directly by the external script
-        `<depot_tools>/win_toolchain/get_toolchain_if_necessary.py`
-    """
-
-    MAX_TRIES = 5
-    RETRY_BASE_DELAY = 5.0
-    RETRY_DELAY_MULTIPLE = 1.3
-    VPYTHON3 = ('vpython3.bat'
-                if GetNormalizedPlatform() == 'win32' else 'vpython3')
-
-    def __init__(self, path, boto_path=None):
-        if not os.path.exists(path):
-            raise FileNotFoundError('GSUtil not found in %s' % path)
-        self.path = path
-        self.boto_path = boto_path
-
-    def get_sub_env(self):
-        env = os.environ.copy()
-        if self.boto_path == os.devnull:
-            env['AWS_CREDENTIAL_FILE'] = ''
-            env['BOTO_CONFIG'] = ''
-        elif self.boto_path:
-            env['AWS_CREDENTIAL_FILE'] = self.boto_path
-            env['BOTO_CONFIG'] = self.boto_path
-
-        if PLATFORM_MAPPING[sys.platform] != 'win':
-            env.update((x, "/tmp") for x in _TEMPDIR_ENV_VARS)
-
-        return env
-
-    def call(self, *args):
-        cmd = [self.VPYTHON3, self.path]
-        cmd.extend(args)
-        return subprocess2.call(cmd, env=self.get_sub_env())
-
-    def check_call(self, *args):
-        cmd = [self.VPYTHON3, self.path]
-        cmd.extend(args)
-        ((out, err), code) = subprocess2.communicate(cmd,
-                                                     stdout=subprocess2.PIPE,
-                                                     stderr=subprocess2.PIPE,
-                                                     env=self.get_sub_env())
-
-        out = out.decode('utf-8', 'replace')
-        err = err.decode('utf-8', 'replace')
-
-        # Parse output.
-        status_code_match = re.search('status=([0-9]+)', err)
-        if status_code_match:
-            return (int(status_code_match.group(1)), out, err)
-        if ('ServiceException: 401 Anonymous' in err):
-            return (401, out, err)
-        if ('You are attempting to access protected data with '
-                'no configured credentials.' in err):
-            return (403, out, err)
-        if 'matched no objects' in err or 'No URLs matched' in err:
-            return (404, out, err)
-        return (code, out, err)
-
-    def check_call_with_retries(self, *args):
-        delay = self.RETRY_BASE_DELAY
-        for i in range(self.MAX_TRIES):
-            code, out, err = self.check_call(*args)
-            if not code or i == self.MAX_TRIES - 1:
-                break
-
-            time.sleep(delay)
-            delay *= self.RETRY_DELAY_MULTIPLE
-
-        return code, out, err
-
-
 def check_platform(target):
 def check_platform(target):
     """Checks if any parent directory of target matches (win|mac|linux)."""
     """Checks if any parent directory of target matches (win|mac|linux)."""
     assert os.path.isabs(target)
     assert os.path.isabs(target)
@@ -629,11 +526,12 @@ def main(args):
         options.boto = os.environ.get('NO_AUTH_BOTO_CONFIG', os.devnull)
         options.boto = os.environ.get('NO_AUTH_BOTO_CONFIG', os.devnull)
 
 
     # Make sure gsutil exists where we expect it to.
     # Make sure gsutil exists where we expect it to.
-    if os.path.exists(GSUTIL_DEFAULT_PATH):
-        gsutil = Gsutil(GSUTIL_DEFAULT_PATH, boto_path=options.boto)
+    if os.path.exists(call_google_storage.GSUTIL_DEFAULT_PATH):
+        gsutil = Gsutil(call_google_storage.GSUTIL_DEFAULT_PATH,
+                        boto_path=options.boto)
     else:
     else:
         parser.error('gsutil not found in %s, bad depot_tools checkout?' %
         parser.error('gsutil not found in %s, bad depot_tools checkout?' %
-                     GSUTIL_DEFAULT_PATH)
+                     call_google_storage.GSUTIL_DEFAULT_PATH)
 
 
     # Passing in -g/--config will run our copy of GSUtil, then quit.
     # Passing in -g/--config will run our copy of GSUtil, then quit.
     if options.config:
     if options.config:

+ 171 - 0
gclient.py

@@ -82,6 +82,7 @@
 __version__ = '0.7'
 __version__ = '0.7'
 
 
 import copy
 import copy
+import hashlib
 import json
 import json
 import logging
 import logging
 import optparse
 import optparse
@@ -91,6 +92,9 @@ import posixpath
 import pprint
 import pprint
 import re
 import re
 import sys
 import sys
+import shutil
+import tarfile
+import tempfile
 import time
 import time
 import urllib.parse
 import urllib.parse
 
 
@@ -748,6 +752,17 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
                                        should_process=should_process,
                                        should_process=should_process,
                                        relative=use_relative_paths,
                                        relative=use_relative_paths,
                                        condition=condition))
                                        condition=condition))
+            elif dep_type == 'gcs':
+                deps_to_add.append(
+                    GcsDependency(parent=self,
+                                  name=name,
+                                  bucket=dep_value['bucket'],
+                                  object_name=dep_value['object_name'],
+                                  sha256sum=dep_value['sha256sum'],
+                                  custom_vars=self.custom_vars,
+                                  should_process=should_process,
+                                  relative=use_relative_paths,
+                                  condition=condition))
             else:
             else:
                 url = dep_value.get('url')
                 url = dep_value.get('url')
                 deps_to_add.append(
                 deps_to_add.append(
@@ -2483,6 +2498,162 @@ it or fix the checkout.
         return self._enforced_cpu
         return self._enforced_cpu
 
 
 
 
+class GcsDependency(Dependency):
+    """A Dependency object that represents a single GCS bucket and object"""
+
+    def __init__(self, parent, name, bucket, object_name, sha256sum,
+                 custom_vars, should_process, relative, condition):
+        self.bucket = bucket
+        self.object_name = object_name
+        self.sha256sum = sha256sum
+        url = 'gs://{bucket}/{object_name}'.format(
+            bucket=self.bucket,
+            object_name=self.object_name,
+        )
+        super(GcsDependency, self).__init__(parent=parent,
+                                            name=name,
+                                            url=url,
+                                            managed=None,
+                                            custom_deps=None,
+                                            custom_vars=custom_vars,
+                                            custom_hooks=None,
+                                            deps_file=None,
+                                            should_process=should_process,
+                                            should_recurse=False,
+                                            relative=relative,
+                                            condition=condition)
+
+    #override
+    def run(self, revision_overrides, command, args, work_queue, options,
+            patch_refs, target_branches, skip_sync_revisions):
+        """Downloads GCS package."""
+        logging.info('GcsDependency(%s).run()' % self.name)
+        if not self.should_process:
+            return
+        self.DownloadGoogleStorage()
+        super(GcsDependency,
+              self).run(revision_overrides, command, args, work_queue, options,
+                        patch_refs, target_branches, skip_sync_revisions)
+
+    def WriteFilenameHash(self, sha1, hash_file):
+        with open(hash_file, 'w') as f:
+            f.write(sha1)
+            f.write('\n')
+
+    def IsDownloadNeeded(self, output_dir, output_file):
+        """Check if download and extract is needed."""
+        download_needed = False
+        if not os.path.exists(output_file):
+            download_needed = True
+
+        hash_file = os.path.join(output_dir, 'hash')
+        existing_hash = None
+        if os.path.exists(hash_file):
+            try:
+                with open(hash_file, 'r') as f:
+                    existing_hash = f.read().rstrip()
+            except IOError:
+                download_needed = True
+        else:
+            download_needed = True
+
+        if existing_hash != self.sha256sum:
+            download_needed = True
+        return download_needed
+
+    def GetSha256Sum(self, filename):
+        sha = hashlib.sha256()
+        with open(filename, 'rb') as f:
+            while True:
+                # Read in 1mb chunks, so it doesn't all have to be loaded into
+                # memory.
+                chunk = f.read(1024 * 1024)
+                if not chunk:
+                    break
+                sha.update(chunk)
+        return sha.hexdigest()
+
+    def DownloadGoogleStorage(self):
+        """Calls call_google_storage.py script."""
+        gcs_file_name = self.object_name.split('/')[-1]
+        root_dir = self.root.root_dir
+
+        # Directory of the extracted tarfile contents
+        output_dir = os.path.join(root_dir, self.name)
+        output_file = os.path.join(output_dir, gcs_file_name)
+
+        if not self.IsDownloadNeeded(output_dir, output_file):
+            return
+
+        # Remove hashfile
+        hash_file = os.path.join(output_dir, 'hash')
+        if os.path.exists(hash_file):
+            os.remove(hash_file)
+
+        # Remove tarfile
+        if os.path.exists(output_file):
+            os.remove(output_file)
+
+        # Remove extracted contents
+        if os.path.exists(output_dir):
+            shutil.rmtree(output_dir)
+        os.makedirs(output_dir)
+
+        call_google_storage_path = os.path.join(DEPOT_TOOLS_DIR,
+                                                'call_google_storage.py')
+        cmd = [
+            'vpython3',
+            call_google_storage_path,
+            '--bucket',
+            self.bucket,
+            '--file',
+            self.object_name,
+            '--output',
+            output_file,
+        ]
+        if os.getenv('GCLIENT_TEST') == '1':
+            # Create fake tar file and extracted tar contents
+            tmpdir = tempfile.mkdtemp()
+            copy_dir = os.path.join(tmpdir, self.name, 'extracted_dir')
+            if os.path.exists(copy_dir):
+                shutil.rmtree(copy_dir)
+            os.makedirs(copy_dir)
+            with open(os.path.join(copy_dir, 'extracted_file'), 'w+') as f:
+                f.write('extracted text')
+            with tarfile.open(output_file, "w:gz") as tar:
+                tar.add(copy_dir, arcname=os.path.basename(copy_dir))
+        else:
+            subprocess2.call(cmd)
+
+        calculated_sha256sum = ''
+        if os.getenv('GCLIENT_TEST') == '1':
+            calculated_sha256sum = 'abcd123'
+        else:
+            calculated_sha256sum = self.GetSha256Sum(output_file)
+
+        if calculated_sha256sum != self.sha256sum:
+            raise Exception('sha256sum does not match calculated hash. '
+                            '{original} vs {calculated}'.format(
+                                original=self.sha256sum,
+                                calculated=calculated_sha256sum,
+                            ))
+
+        with tarfile.open(output_file, 'r:*') as tar:
+            tar.extractall(path=output_dir)
+        self.WriteFilenameHash(calculated_sha256sum, hash_file)
+
+    #override
+    def GetScmName(self):
+        """Always 'gcs'."""
+        return 'gcs'
+
+    #override
+    def CreateSCM(self, out_cb=None):
+        """Create a Wrapper instance suitable for handling this GCS dependency."""
+        return gclient_scm.GcsWrapper(self.url, self.root.root_dir, self.name,
+                                      self.outbuf, out_cb)
+
+
 class CipdDependency(Dependency):
 class CipdDependency(Dependency):
     """A Dependency object that represents a single CIPD package."""
     """A Dependency object that represents a single CIPD package."""
     def __init__(self, parent, name, dep_value, cipd_root, custom_vars,
     def __init__(self, parent, name, dep_value, cipd_root, custom_vars,

+ 8 - 0
gclient_eval.py

@@ -131,6 +131,14 @@ _GCLIENT_DEPS_SCHEMA = _NodeDictSchema({
             schema.Optional('dep_type', default='cipd'):
             schema.Optional('dep_type', default='cipd'):
             str,
             str,
         }),
         }),
+        # GCS content.
+        _NodeDictSchema({
+            'bucket': str,
+            'object_name': str,
+            'sha256sum': str,
+            schema.Optional('condition'): str,
+            schema.Optional('dep_type', default='gcs'): str,
+        }),
     ),
     ),
 })
 })
 
 

+ 51 - 0
gclient_scm.py

@@ -1913,6 +1913,57 @@ class CipdWrapper(SCMWrapper):
     """
     """
 
 
 
 
+class GcsWrapper(SCMWrapper):
+    """Wrapper for GCS.
+
+  Currently only supports content from Google Cloud Storage.
+  """
+    name = 'gcs'
+
+    def __init__(self,
+                 url=None,
+                 root_dir=None,
+                 relpath=None,
+                 out_fh=None,
+                 out_cb=None):
+        super(GcsWrapper, self).__init__(url=url,
+                                         root_dir=root_dir,
+                                         relpath=relpath,
+                                         out_fh=out_fh,
+                                         out_cb=out_cb)
+
+    #override
+    def GetCacheMirror(self):
+        return None
+
+    #override
+    def GetActualRemoteURL(self, options):
+        return None
+
+    #override
+    def DoesRemoteURLMatch(self, options):
+        del options
+        return True
+
+    def revert(self, options, args, file_list):
+        """Does nothing."""
+
+    def diff(self, options, args, file_list):
+        """GCS has no notion of diffing."""
+
+    def pack(self, options, args, file_list):
+        """GCS has no notion of diffing."""
+
+    def revinfo(self, options, args, file_list):
+        """Does nothing"""
+
+    def status(self, options, args, file_list):
+        pass
+
+    def update(self, options, args, file_list):
+        """Does nothing."""
+
+
 class CogWrapper(SCMWrapper):
 class CogWrapper(SCMWrapper):
     """Wrapper for Cog, all no-op."""
     """Wrapper for Cog, all no-op."""
     name = 'cog'
     name = 'cog'

+ 1 - 1
git_cache.py

@@ -16,7 +16,7 @@ import threading
 import time
 import time
 import urllib.parse
 import urllib.parse
 
 
-from download_from_google_storage import Gsutil
+from call_google_storage import Gsutil
 import gclient_utils
 import gclient_utils
 import lockfile
 import lockfile
 import metrics
 import metrics

+ 62 - 1
testing_support/fake_repos.py

@@ -16,6 +16,7 @@ import random
 import re
 import re
 import socket
 import socket
 import sys
 import sys
+import tarfile
 import tempfile
 import tempfile
 import textwrap
 import textwrap
 import time
 import time
@@ -47,6 +48,9 @@ def read_tree(tree_root):
         for f in [join(root, f) for f in files if not f.startswith('.')]:
         for f in [join(root, f) for f in files if not f.startswith('.')]:
             filepath = f[len(tree_root) + 1:].replace(os.sep, '/')
             filepath = f[len(tree_root) + 1:].replace(os.sep, '/')
             assert len(filepath) > 0, f
             assert len(filepath) > 0, f
+            if tarfile.is_tarfile(join(root, f)):
+                tree[filepath] = 'tarfile'
+                continue
             with io.open(join(root, f), encoding='utf-8') as f:
             with io.open(join(root, f), encoding='utf-8') as f:
                 tree[filepath] = f.read()
                 tree[filepath] = f.read()
     return tree
     return tree
@@ -210,7 +214,7 @@ class FakeReposBase(object):
 
 
 class FakeRepos(FakeReposBase):
 class FakeRepos(FakeReposBase):
     """Implements populateGit()."""
     """Implements populateGit()."""
-    NB_GIT_REPOS = 21
+    NB_GIT_REPOS = 23
 
 
     def populateGit(self):
     def populateGit(self):
         # Testing:
         # Testing:
@@ -881,6 +885,63 @@ deps = {
             },
             },
         )
         )
 
 
+        self._commit_git(
+            'repo_22', {
+                'DEPS':
+                textwrap.dedent("""\
+        vars = {}
+        deps = {
+          'src/gcs_dep': {
+            'bucket': '123bucket',
+            'object_name': 'deadbeef',
+            'dep_type': 'gcs',
+            'sha256sum': 'abcd123',
+          },
+          'src/another_gcs_dep': {
+            'bucket': '456bucket',
+            'object_name': 'Linux/llvmfile.tar.gz',
+            'dep_type': 'gcs',
+            'sha256sum': 'abcd123',
+          },
+        }"""),
+                'origin':
+                'git/repo_22@1\n'
+            })
+
+        self._commit_git(
+            'repo_23', {
+                'DEPS': """
+deps = {
+  'src/repo12': '/repo_12',
+}""",
+                'origin': 'git/repo_23@1\n',
+            })
+
+        self._commit_git(
+            'repo_23', {
+                'DEPS': """
+deps = {
+  'src/repo12': '/repo_12@refs/changes/1212',
+}""",
+                'origin': 'git/repo_23@2\n',
+            })
+
+        # src/repo12 is now a GCS dependency.
+        self._commit_git(
+            'repo_23', {
+                'DEPS': """
+deps = {
+  'src/repo12': {
+    'bucket': 'bucket123',
+    'object_name': 'path_to_file.tar.gz',
+    'dep_type': 'gcs',
+    'sha256sum': 'abcd123',
+  },
+}
+""",
+                'origin': 'git/repo_23@3\n'
+            })
+
 
 
 class FakeRepoSkiaDEPS(FakeReposBase):
 class FakeRepoSkiaDEPS(FakeReposBase):
     """Simulates the Skia DEPS transition in Chrome."""
     """Simulates the Skia DEPS transition in Chrome."""

+ 2 - 1
tests/download_from_google_storage_unittest.py

@@ -18,6 +18,7 @@ import unittest
 
 
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 
+import call_google_storage
 import upload_to_google_storage
 import upload_to_google_storage
 import download_from_google_storage
 import download_from_google_storage
 
 
@@ -151,7 +152,7 @@ class GstoolsUnitTests(unittest.TestCase):
 
 
     def test_gsutil(self):
     def test_gsutil(self):
         # This will download a real gsutil package from Google Storage.
         # This will download a real gsutil package from Google Storage.
-        gsutil = download_from_google_storage.Gsutil(GSUTIL_DEFAULT_PATH, None)
+        gsutil = call_google_storage.Gsutil(GSUTIL_DEFAULT_PATH, None)
         self.assertEqual(gsutil.path, GSUTIL_DEFAULT_PATH)
         self.assertEqual(gsutil.path, GSUTIL_DEFAULT_PATH)
         code, _, err = gsutil.check_call()
         code, _, err = gsutil.check_call()
         self.assertEqual(code, 0, err)
         self.assertEqual(code, 0, err)

+ 140 - 0
tests/gclient_gcs_smoketest.py

@@ -0,0 +1,140 @@
+#!/usr/bin/env vpython3
+# Copyright (c) 2024 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Smoke tests for gclient.py.
+
+Shell out 'gclient' and run gcs tests.
+"""
+
+import logging
+import os
+import sys
+import unittest
+
+from unittest import mock
+import gclient_smoketest_base
+import subprocess2
+
+ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+
+class GClientSmokeGcs(gclient_smoketest_base.GClientSmokeBase):
+
+    def setUp(self):
+        super(GClientSmokeGcs, self).setUp()
+        self.enabled = self.FAKE_REPOS.set_up_git()
+        if not self.enabled:
+            self.skipTest('git fake repos not available')
+        self.env['PATH'] = (os.path.join(ROOT_DIR, 'testing_support') +
+                            os.pathsep + self.env['PATH'])
+
+    def testSyncGcs(self):
+        self.gclient(['config', self.git_base + 'repo_22', '--name', 'src'])
+        self.gclient(['sync'])
+
+        tree = self.mangle_git_tree(('repo_22@1', 'src'))
+        tree.update({
+            'src/another_gcs_dep/hash':
+            'abcd123\n',
+            'src/another_gcs_dep/llvmfile.tar.gz':
+            'tarfile',
+            'src/another_gcs_dep/extracted_dir/extracted_file':
+            'extracted text',
+            'src/gcs_dep/deadbeef':
+            'tarfile',
+            'src/gcs_dep/hash':
+            'abcd123\n',
+            'src/gcs_dep/extracted_dir/extracted_file':
+            'extracted text',
+        })
+        self.assertTree(tree)
+
+    def testConvertGitToGcs(self):
+        self.gclient(['config', self.git_base + 'repo_23', '--name', 'src'])
+
+        # repo_23@1 has src/repo12 as a git dependency.
+        self.gclient([
+            'sync', '-v', '-v', '-v', '--revision',
+            self.githash('repo_23', 1)
+        ])
+
+        tree = self.mangle_git_tree(('repo_23@1', 'src'),
+                                    ('repo_12@1', 'src/repo12'))
+        self.assertTree(tree)
+
+        # repo_23@3 has src/repo12 as a gcs dependency.
+        self.gclient([
+            'sync', '-v', '-v', '-v', '--revision',
+            self.githash('repo_23', 3), '--delete_unversioned_trees'
+        ])
+
+        tree = self.mangle_git_tree(('repo_23@3', 'src'))
+        tree.update({
+            'src/repo12/extracted_dir/extracted_file': 'extracted text',
+            'src/repo12/hash': 'abcd123\n',
+            'src/repo12/path_to_file.tar.gz': 'tarfile',
+        })
+        self.assertTree(tree)
+
+    def testConvertGcsToGit(self):
+        self.gclient(['config', self.git_base + 'repo_23', '--name', 'src'])
+
+        # repo_23@3 has src/repo12 as a gcs dependency.
+        self.gclient([
+            'sync', '-v', '-v', '-v', '--revision',
+            self.githash('repo_23', 3), '--delete_unversioned_trees'
+        ])
+
+        tree = self.mangle_git_tree(('repo_23@3', 'src'))
+        tree.update({
+            'src/repo12/extracted_dir/extracted_file': 'extracted text',
+            'src/repo12/hash': 'abcd123\n',
+            'src/repo12/path_to_file.tar.gz': 'tarfile',
+        })
+        self.assertTree(tree)
+
+        # repo_23@1 has src/repo12 as a git dependency.
+        self.gclient([
+            'sync', '-v', '-v', '-v', '--revision',
+            self.githash('repo_23', 1)
+        ])
+
+        tree = self.mangle_git_tree(('repo_23@1', 'src'),
+                                    ('repo_12@1', 'src/repo12'))
+        tree.update({
+            'src/repo12/extracted_dir/extracted_file': 'extracted text',
+            'src/repo12/hash': 'abcd123\n',
+            'src/repo12/path_to_file.tar.gz': 'tarfile',
+        })
+        self.assertTree(tree)
+
+    def testRevInfo(self):
+        self.gclient(['config', self.git_base + 'repo_22', '--name', 'src'])
+        self.gclient(['sync'])
+        results = self.gclient(['revinfo'])
+        out = ('src: %(base)srepo_22\n'
+               'src/another_gcs_dep: gs://456bucket/Linux/llvmfile.tar.gz\n'
+               'src/gcs_dep: gs://123bucket/deadbeef\n' % {
+                   'base': self.git_base,
+               })
+        self.check((out, '', 0), results)
+
+    def testRevInfoActual(self):
+        self.gclient(['config', self.git_base + 'repo_22', '--name', 'src'])
+        self.gclient(['sync'])
+        results = self.gclient(['revinfo', '--actual'])
+        out = (
+            'src: %(base)srepo_22@%(hash1)s\n'
+            'src/another_gcs_dep: gs://456bucket/Linux/llvmfile.tar.gz@None\n'
+            'src/gcs_dep: gs://123bucket/deadbeef@None\n' % {
+                'base': self.git_base,
+                'hash1': self.githash('repo_22', 1),
+            })
+        self.check((out, '', 0), results)
+
+
+if __name__ == '__main__':
+    if '-v' in sys.argv:
+        logging.basicConfig(level=logging.DEBUG)
+    unittest.main()

+ 27 - 0
tests/gclient_scm_test.py

@@ -1022,6 +1022,33 @@ class CipdWrapperTestCase(unittest.TestCase):
         scm.update(None, (), [])
 
 
 
 
class GcsWrapperTestCase(unittest.TestCase):
    """Tests that GcsWrapper SCM operations (revert/revinfo/update) are no-ops."""

    def setUp(self):
        # Fix: use TemporaryDirectory + addCleanup so the scratch dir is
        # removed when the test ends; a bare tempfile.mkdtemp() leaked one
        # directory per test run.
        tmp = tempfile.TemporaryDirectory()
        self.addCleanup(tmp.cleanup)
        self.workdir = tmp.name
        self.url = 'gs://123bucket/path_to_tar.gz'

    def createScm(self):
        """Returns a GcsWrapper rooted at the scratch workdir."""
        return gclient_scm.GcsWrapper(url=self.url,
                                      root_dir=self.workdir,
                                      relpath='fake_relpath')

    def testRevert(self):
        """Checks that revert does nothing."""
        scm = self.createScm()
        scm.revert(None, (), [])

    def testRevinfo(self):
        """Checks that revinfo does nothing."""
        scm = self.createScm()
        scm.revinfo(None, (), [])

    def testUpdate(self):
        """Checks that update does nothing."""
        scm = self.createScm()
        scm.update(None, (), [])
+
 class BranchHeadsFakeRepo(fake_repos.FakeReposBase):
     def populateGit(self):
         # Creates a tree that looks like this:

+ 2 - 2
upload_to_google_storage.py

@@ -16,10 +16,10 @@ import tarfile
 import threading
 import time
 
+from call_google_storage import Gsutil
+from call_google_storage import GSUTIL_DEFAULT_PATH
 from download_from_google_storage import get_sha1
-from download_from_google_storage import Gsutil
 from download_from_google_storage import PrinterThread
-from download_from_google_storage import GSUTIL_DEFAULT_PATH
 
 
 USAGE_STRING = """%prog [options] target [target2 ...].
 USAGE_STRING = """%prog [options] target [target2 ...].
 Target is the file intended to be uploaded to Google Storage.
 Target is the file intended to be uploaded to Google Storage.

+ 6 - 6
win_toolchain/get_toolchain_if_necessary.py

@@ -60,7 +60,7 @@ BASEDIR = os.path.dirname(os.path.abspath(__file__))
 DEPOT_TOOLS_PATH = os.path.join(BASEDIR, '..')
 sys.path.append(DEPOT_TOOLS_PATH)
 try:
-    import download_from_google_storage
+    import call_google_storage
 except ImportError:
     # Allow use of utility functions in this script from package_from_installed
     # on bare VM that doesn't have a full depot_tools.
@@ -255,8 +255,8 @@ def LooksLikeGoogler():
 
 def CanAccessToolchainBucket():
     """Checks whether the user has access to gs://chrome-wintoolchain/."""
-    gsutil = download_from_google_storage.Gsutil(
-        download_from_google_storage.GSUTIL_DEFAULT_PATH, boto_path=None)
+    gsutil = call_google_storage.Gsutil(call_google_storage.GSUTIL_DEFAULT_PATH,
+                                        boto_path=None)
     code, stdout, stderr = gsutil.check_call('ls', 'gs://chrome-wintoolchain/')
     if code != 0:
         # Make sure any error messages are made visible to the user.
@@ -294,7 +294,7 @@ def RequestGsAuthentication():
     print('I\'m sorry for the hassle, but you need to do a one-time manual')
     print('authentication. Please run:')
     print()
-    print('    download_from_google_storage --config')
+    print('    call_google_storage --config')
     print()
     print('and follow the instructions.')
     print()
@@ -342,8 +342,8 @@ def DownloadUsingGsutil(filename):
     temp_dir = tempfile.mkdtemp()
     assert os.path.basename(filename) == filename
     target_path = os.path.join(temp_dir, filename)
-    gsutil = download_from_google_storage.Gsutil(
-        download_from_google_storage.GSUTIL_DEFAULT_PATH, boto_path=None)
+    gsutil = call_google_storage.Gsutil(call_google_storage.GSUTIL_DEFAULT_PATH,
+                                        boto_path=None)
     code = gsutil.call('cp', 'gs://chrome-wintoolchain/' + filename,
                        target_path)
     if code != 0: