xref: /aosp_15_r20/external/autotest/client/bin/result_tools/utils.py (revision 9c5db1993ded3edbeafc8092d69fe5de2ee02df7)
1*9c5db199SXin Li#!/usr/bin/python3
2*9c5db199SXin Li# Copyright 2017 The Chromium OS Authors. All rights reserved.
3*9c5db199SXin Li# Use of this source code is governed by a BSD-style license that can be
4*9c5db199SXin Li# found in the LICENSE file.
5*9c5db199SXin Li
6*9c5db199SXin Li"""
7*9c5db199SXin LiThis is a utility to build a summary of the given directory. and save to a json
8*9c5db199SXin Lifile.
9*9c5db199SXin Li
10*9c5db199SXin Liusage: utils.py [-h] [-p PATH] [-m MAX_SIZE_KB]
11*9c5db199SXin Li
12*9c5db199SXin Lioptional arguments:
13*9c5db199SXin Li  -p PATH         Path to build directory summary.
14*9c5db199SXin Li  -m MAX_SIZE_KB  Maximum result size in KB. Set to 0 to disable result
15*9c5db199SXin Li                  throttling.
16*9c5db199SXin Li
17*9c5db199SXin LiThe content of the json file looks like:
18*9c5db199SXin Li{'default': {'/D': [{'control': {'/S': 734}},
19*9c5db199SXin Li                    {'debug': {'/D': [{'client.0.DEBUG': {'/S': 5698}},
20*9c5db199SXin Li                                       {'client.0.ERROR': {'/S': 254}},
21*9c5db199SXin Li                                       {'client.0.INFO': {'/S': 1020}},
22*9c5db199SXin Li                                       {'client.0.WARNING': {'/S': 242}}],
23*9c5db199SXin Li                               '/S': 7214}}
24*9c5db199SXin Li                      ],
25*9c5db199SXin Li              '/S': 7948
26*9c5db199SXin Li            }
27*9c5db199SXin Li}
28*9c5db199SXin Li"""
29*9c5db199SXin Li
30*9c5db199SXin Lifrom __future__ import division
31*9c5db199SXin Lifrom __future__ import print_function
32*9c5db199SXin Li
33*9c5db199SXin Liimport argparse
34*9c5db199SXin Liimport copy
35*9c5db199SXin Liimport fnmatch
36*9c5db199SXin Liimport glob
37*9c5db199SXin Liimport json
38*9c5db199SXin Liimport logging
39*9c5db199SXin Liimport os
40*9c5db199SXin Liimport random
41*9c5db199SXin Lifrom six.moves import range
42*9c5db199SXin Liimport sys
43*9c5db199SXin Liimport time
44*9c5db199SXin Liimport traceback
45*9c5db199SXin Li
46*9c5db199SXin Litry:
47*9c5db199SXin Li    from autotest_lib.client.bin.result_tools import dedupe_file_throttler
48*9c5db199SXin Li    from autotest_lib.client.bin.result_tools import delete_file_throttler
49*9c5db199SXin Li    from autotest_lib.client.bin.result_tools import result_info
50*9c5db199SXin Li    from autotest_lib.client.bin.result_tools import throttler_lib
51*9c5db199SXin Li    from autotest_lib.client.bin.result_tools import utils_lib
52*9c5db199SXin Li    from autotest_lib.client.bin.result_tools import zip_file_throttler
53*9c5db199SXin Liexcept ImportError:
54*9c5db199SXin Li    import dedupe_file_throttler
55*9c5db199SXin Li    import delete_file_throttler
56*9c5db199SXin Li    import result_info
57*9c5db199SXin Li    import throttler_lib
58*9c5db199SXin Li    import utils_lib
59*9c5db199SXin Li    import zip_file_throttler
60*9c5db199SXin Li
61*9c5db199SXin Li
62*9c5db199SXin Li# Do NOT import autotest_lib modules here. This module can be executed without
63*9c5db199SXin Li# dependency on other autotest modules. This is to keep the logic of result
64*9c5db199SXin Li# trimming on the server side, instead of depending on the autotest client
65*9c5db199SXin Li# module.
66*9c5db199SXin Li
67*9c5db199SXin LiDEFAULT_SUMMARY_FILENAME_FMT = 'dir_summary_%d.json'
68*9c5db199SXin LiSUMMARY_FILE_PATTERN = 'dir_summary_*.json'
69*9c5db199SXin LiMERGED_SUMMARY_FILENAME = 'dir_summary_final.json'
70*9c5db199SXin Li
71*9c5db199SXin Li# Minimum disk space should be available after saving the summary file.
72*9c5db199SXin LiMIN_FREE_DISK_BYTES = 10 * 1024 * 1024
73*9c5db199SXin Li
74*9c5db199SXin Li# Autotest uses some state files to track process running state. The files are
75*9c5db199SXin Li# deleted from test results. Therefore, these files can be ignored.
76*9c5db199SXin LiFILES_TO_IGNORE = set([
77*9c5db199SXin Li    'control.autoserv.state'
78*9c5db199SXin Li])
79*9c5db199SXin Li
80*9c5db199SXin Li# Smallest file size to shrink to.
81*9c5db199SXin LiMIN_FILE_SIZE_LIMIT_BYTE = 10 * 1024
82*9c5db199SXin Li
def get_unique_dir_summary_file(path):
    """Build a path for a new directory summary json file.

    The file name is derived from the current timestamp. If a file with
    that name already exists in `path`, a numeric suffix is appended and
    incremented until an unused name is found.

    @param path: The directory path to save the summary file to.
    @return: A file path that does not currently exist.
    """
    base_name = DEFAULT_SUMMARY_FILENAME_FMT % time.time()
    candidate = os.path.join(path, base_name)
    if not os.path.exists(candidate):
        return candidate
    # Name collision: append an increasing counter before the extension
    # until the name is free.
    stem, extension = os.path.splitext(base_name)
    suffix = 1
    while os.path.exists(candidate):
        candidate = os.path.join(path, '%s_%s%s' % (stem, suffix, extension))
        suffix += 1
    return candidate
98*9c5db199SXin Li
99*9c5db199SXin Li
100*9c5db199SXin Lidef _preprocess_result_dir_path(path):
101*9c5db199SXin Li    """Verify the result directory path is valid and make sure it ends with `/`.
102*9c5db199SXin Li
103*9c5db199SXin Li    @param path: A path to the result directory.
104*9c5db199SXin Li    @return: A verified and processed path to the result directory.
105*9c5db199SXin Li    @raise IOError: If the path doesn't exist.
106*9c5db199SXin Li    @raise ValueError: If the path is not a directory.
107*9c5db199SXin Li    """
108*9c5db199SXin Li    if not os.path.exists(path):
109*9c5db199SXin Li        raise IOError('Path %s does not exist.' % path)
110*9c5db199SXin Li
111*9c5db199SXin Li    if not os.path.isdir(path):
112*9c5db199SXin Li        raise ValueError('The given path %s is a file. It must be a '
113*9c5db199SXin Li                         'directory.' % path)
114*9c5db199SXin Li
115*9c5db199SXin Li    # Make sure the path ends with `/` so the root key of summary json is always
116*9c5db199SXin Li    # utils_lib.ROOT_DIR ('')
117*9c5db199SXin Li    if not path.endswith(os.sep):
118*9c5db199SXin Li        path = path + os.sep
119*9c5db199SXin Li
120*9c5db199SXin Li    return path
121*9c5db199SXin Li
122*9c5db199SXin Li
def _delete_missing_entries(summary_old, summary_new):
    """Delete files/directories that only exist in the old summary.

    When the new summary is final, i.e., it's built from the final result
    directory, files or directories missing are considered to be deleted and
    trimmed to size 0.

    The old summary is modified in place; nothing is returned.

    @param summary_old: Old directory summary (a ResultInfo; mutated here).
    @param summary_new: New directory summary (a ResultInfo, or
            result_info.EMPTY to treat everything in summary_old as deleted).
    """
    new_files = summary_new.get_file_names()
    old_files = summary_old.get_file_names()
    for name in old_files:
        old_file = summary_old.get_file(name)
        if name not in new_files:
            if old_file.is_dir:
                # Trim sub-directories. Parent size bookkeeping is suspended
                # during the recursion and recomputed once at the end via
                # update_sizes().
                with old_file.disable_updating_parent_size_info():
                    _delete_missing_entries(old_file, result_info.EMPTY)
                old_file.update_sizes()
            elif name in FILES_TO_IGNORE:
                # Remove the file from the summary as it can be ignored
                # (autotest state files are deleted from results on purpose).
                summary_old.remove_file(name)
            else:
                with old_file.disable_updating_parent_size_info():
                    # Before setting the trimmed size to 0, update the collected
                    # size if it's not set yet, so the original size is not
                    # lost.
                    if not old_file.is_collected_size_recorded:
                        old_file.collected_size = old_file.trimmed_size
                    old_file.trimmed_size = 0
        elif old_file.is_dir:
            # If `name` is a directory in the old summary, but a file in the new
            # summary, delete the entry in the old summary.
            new_file = summary_new.get_file(name)
            if not new_file.is_dir:
                new_file = result_info.EMPTY
            _delete_missing_entries(old_file, new_file)
160*9c5db199SXin Li
161*9c5db199SXin Li
def _relocate_summary(result_dir, summary_file, summary):
    """Update the given summary with the path relative to the result_dir.

    The summary is re-rooted: a chain of ResultInfo directory nodes is built
    from result_dir down to the directory containing summary_file, and the
    summary's files are attached at the bottom of that chain.

    @param result_dir: Path to the result directory.
    @param summary_file: Path to the summary file.
    @param summary: A directory summary inside the given result_dir or its
            sub-directory.
    @return: An updated summary with the path relative to the result_dir.
    """
    dir_name = os.path.dirname(summary_file)
    prefix = result_dir.rstrip(os.sep)
    # Strip only the leading occurrence of result_dir. The previous
    # str.replace-based implementation removed *every* occurrence of the
    # prefix, corrupting the relative path whenever a sub-directory path
    # repeated the result directory's name.
    if dir_name.startswith(prefix):
        sub_path = dir_name[len(prefix):]
    else:
        sub_path = dir_name
    if sub_path == '':
        return summary

    folders = sub_path.split(os.sep)

    # The first folder is always '' because of the leading `/` in sub_path.
    parent = result_info.ResultInfo(
            result_dir, utils_lib.ROOT_DIR, parent_result_info=None)
    root = parent

    # That makes sure root has only one folder of utils_lib.ROOT_DIR.
    for i in range(1, len(folders)):
        child = result_info.ResultInfo(
                parent.path, folders[i], parent_result_info=parent)
        if i == len(folders) - 1:
            # The deepest node is the directory holding the summary file;
            # attach the summary's files there.
            for info in summary.files:
                child.files.append(info)

        parent.files.append(child)
        parent = child

    parent.update_sizes()
    return root
197*9c5db199SXin Li
198*9c5db199SXin Li
def merge_summaries(path):
    """Merge all directory summaries in the given path.

    This function calculates the total size of result files being collected for
    the test device and the files generated on the drone. It also returns merged
    directory summary.

    @param path: A path to search for directory summaries.
    @return a tuple of (client_collected_bytes, merged_summary, files):
            client_collected_bytes: The total size of results collected from
                the DUT. The number can be larger than the total file size of
                the given path, as files can be overwritten or removed.
            merged_summary: The merged directory summary of the given path.
            files: All summary files in the given path, including
                sub-directories.
    """
    path = _preprocess_result_dir_path(path)
    # Find all directory summary files and sort them by the time stamp in file
    # name. Use the shared SUMMARY_FILE_PATTERN constant (instead of a
    # duplicated literal) so this stays in sync with the names produced by
    # get_unique_dir_summary_file.
    summary_files = []
    for root, _, filenames in os.walk(path):
        for filename in fnmatch.filter(filenames, SUMMARY_FILE_PATTERN):
            summary_files.append(os.path.join(root, filename))
    summary_files = sorted(summary_files, key=os.path.getmtime)

    all_summaries = []
    for summary_file in summary_files:
        try:
            summary = result_info.load_summary_json_file(summary_file)
            summary = _relocate_summary(path, summary_file, summary)
            all_summaries.append(summary)
        except (IOError, ValueError) as e:
            # A corrupt or unreadable summary is skipped rather than aborting
            # the whole merge.
            utils_lib.LOG('Failed to load summary file %s Error: %s' %
                          (summary_file, e))

    # Merge all summaries.
    merged_summary = all_summaries[0] if len(all_summaries) > 0 else None
    for summary in all_summaries[1:]:
        merged_summary.merge(summary)
    # After all summaries from the test device (client side) are merged, we can
    # get the total size of result files being transferred from the test
    # device. If there is no directory summary collected, default
    # client_collected_bytes to 0.
    client_collected_bytes = 0
    if merged_summary:
        client_collected_bytes = merged_summary.collected_size

    # Get the summary of current directory
    last_summary = result_info.ResultInfo.build_from_path(path)

    if merged_summary:
        merged_summary.merge(last_summary, is_final=True)
        _delete_missing_entries(merged_summary, last_summary)
    else:
        merged_summary = last_summary

    return client_collected_bytes, merged_summary, summary_files
256*9c5db199SXin Li
257*9c5db199SXin Li
def _throttle_results(summary, max_result_size_KB):
    """Throttle the test results by limiting to the given maximum size.

    Applies a sequence of progressively more aggressive throttlers
    (zip -> dedupe -> zip -> delete) until the result size in `summary`
    drops under the limit, or all throttlers have run.

    @param summary: A ResultInfo object containing result summary; updated
            in place as files are throttled.
    @param max_result_size_KB: Maximum test result size in KB.
    """
    if throttler_lib.check_throttle_limit(summary, max_result_size_KB):
        utils_lib.LOG(
                'Result size is %s, which is less than %d KB. No need to '
                'throttle.' %
                (utils_lib.get_size_string(summary.trimmed_size),
                 max_result_size_KB))
        return

    args = {'summary': summary,
            'max_result_size_KB': max_result_size_KB}
    args_skip_autotest_log = copy.copy(args)
    args_skip_autotest_log['skip_autotest_log'] = True
    # Apply the throttlers in following order.
    throttlers = [
            (zip_file_throttler, copy.copy(args_skip_autotest_log)),
            (dedupe_file_throttler, copy.copy(args)),
            (zip_file_throttler, copy.copy(args)),
            ]

    # Add another zip_file_throttler to compress the files being shrunk.
    # The threshold is set to half of the DEFAULT_FILE_SIZE_LIMIT_BYTE of
    # shrink_file_throttler.
    new_args = copy.copy(args)
    new_args['file_size_threshold_byte'] = 50 * 1024
    throttlers.append((zip_file_throttler, new_args))

    # If the above throttlers still can't reduce the result size to be under
    # max_result_size_KB, try to delete files with various threshold, starting
    # at 5MB then lowering to 100KB.
    delete_file_thresholds = [5*1024*1024, 1*1024*1024, 100*1024]
    # Try to keep tgz files first. (Raw string: `\.` is an invalid escape
    # sequence in a non-raw literal on modern Python.)
    exclude_file_patterns = [r'.*\.tgz']
    for threshold in delete_file_thresholds:
        new_args = copy.copy(args)
        new_args.update({'file_size_threshold_byte': threshold,
                         'exclude_file_patterns': exclude_file_patterns})
        throttlers.append((delete_file_throttler, new_args))
    # Add one more delete_file_throttler that does not skip tgz files.
    new_args = copy.copy(args)
    new_args.update({'file_size_threshold_byte': delete_file_thresholds[-1]})
    throttlers.append((delete_file_throttler, new_args))

    # Run the throttlers in order until result size is under max_result_size_KB.
    old_size = summary.trimmed_size
    for throttler, args in throttlers:
        try:
            args_without_summary = copy.copy(args)
            del args_without_summary['summary']
            utils_lib.LOG('Applying throttler %s, args: %s' %
                          (throttler.__name__, args_without_summary))
            throttler.throttle(**args)
            if throttler_lib.check_throttle_limit(summary, max_result_size_KB):
                return
        except Exception:
            # Catch Exception (not bare except) so SystemExit and
            # KeyboardInterrupt still propagate; a failing throttler should
            # not abort the remaining ones.
            utils_lib.LOG('Failed to apply throttler %s. Exception: %s' %
                          (throttler, traceback.format_exc()))
        finally:
            new_size = summary.trimmed_size
            if new_size == old_size:
                utils_lib.LOG('Result size was not changed: %s.' % old_size)
            else:
                utils_lib.LOG('Result size was reduced from %s to %s.' %
                              (utils_lib.get_size_string(old_size),
                               utils_lib.get_size_string(new_size)))
328*9c5db199SXin Li
329*9c5db199SXin Li
330*9c5db199SXin Lidef _setup_logging():
331*9c5db199SXin Li    """Set up logging to direct logs to stdout."""
332*9c5db199SXin Li    # Direct logging to stdout
333*9c5db199SXin Li    logger = logging.getLogger()
334*9c5db199SXin Li    logger.setLevel(logging.DEBUG)
335*9c5db199SXin Li    handler = logging.StreamHandler(sys.stdout)
336*9c5db199SXin Li    handler.setLevel(logging.DEBUG)
337*9c5db199SXin Li    formatter = logging.Formatter('%(asctime)s %(message)s')
338*9c5db199SXin Li    handler.setFormatter(formatter)
339*9c5db199SXin Li    logger.handlers = []
340*9c5db199SXin Li    logger.addHandler(handler)
341*9c5db199SXin Li
342*9c5db199SXin Li
343*9c5db199SXin Lidef _parse_options():
344*9c5db199SXin Li    """Options for the main script.
345*9c5db199SXin Li
346*9c5db199SXin Li    @return: An option object container arg values.
347*9c5db199SXin Li    """
348*9c5db199SXin Li    parser = argparse.ArgumentParser()
349*9c5db199SXin Li    parser.add_argument('-p', type=str, dest='path',
350*9c5db199SXin Li                        help='Path to build directory summary.')
351*9c5db199SXin Li    parser.add_argument('-m', type=int, dest='max_size_KB', default=0,
352*9c5db199SXin Li                        help='Maximum result size in KB. Set to 0 to disable '
353*9c5db199SXin Li                        'result throttling.')
354*9c5db199SXin Li    parser.add_argument('-d', action='store_true', dest='delete_summaries',
355*9c5db199SXin Li                        default=False,
356*9c5db199SXin Li                        help='-d to delete all result summary files in the '
357*9c5db199SXin Li                        'given path.')
358*9c5db199SXin Li    return parser.parse_args()
359*9c5db199SXin Li
360*9c5db199SXin Li
def execute(path, max_size_KB):
    """Execute the script with given arguments.

    Builds a summary of `path`, saves it as a json file in the result
    directory, and (when a size limit is set) possibly throttles the result
    files to fit under the limit.

    @param path: Path to build directory summary.
    @param max_size_KB: Maximum result size in KB. 0 disables throttling.
    @raise utils_lib.NotEnoughDiskError: If writing the summary file would
            leave less than MIN_FREE_DISK_BYTES of free disk space.
    """
    utils_lib.LOG('Running result_tools/utils on path: %s' % path)
    utils_lib.LOG('Running result_tools/utils in pyversion %s ' % sys.version)

    if max_size_KB > 0:
        utils_lib.LOG('Throttle result size to : %s' %
                      utils_lib.get_size_string(max_size_KB * 1024))

    # The summary file lives in the directory itself, or, when `path` is a
    # file, in its containing directory.
    result_dir = path if os.path.isdir(path) else os.path.dirname(path)
    summary = result_info.ResultInfo.build_from_path(path)
    summary_json = json.dumps(summary)
    summary_file = get_unique_dir_summary_file(result_dir)

    # Refuse to write the summary file if doing so would nearly fill the disk.
    stat = os.statvfs(path)
    free_space = stat.f_frsize * stat.f_bavail
    if free_space - len(summary_json) < MIN_FREE_DISK_BYTES:
        raise utils_lib.NotEnoughDiskError(
                'Not enough disk space after saving the summary file. '
                'Available free disk: %s bytes. Summary file size: %s bytes.' %
                (free_space, len(summary_json)))

    with open(summary_file, 'w') as f:
        f.write(summary_json)
    utils_lib.LOG('Directory summary of %s is saved to file %s.' %
                  (path, summary_file))

    if max_size_KB > 0 and summary.trimmed_size > 0:
        original_size = summary.trimmed_size
        # Skip throttling with probability `limit / size`: results already
        # under the limit are always skipped, oversized ones usually are not.
        throttle_probability = float(max_size_KB * 1024) / original_size
        if random.random() < throttle_probability:
            utils_lib.LOG(
                    'Skip throttling %s: size=%s, throttle_probability=%s' %
                    (path, original_size, throttle_probability))
        else:
            _throttle_results(summary, max_size_KB)
            if summary.trimmed_size < original_size:
                # Files were throttled; persist the updated summary file.
                utils_lib.LOG('Overwrite the summary file: %s' % summary_file)
                result_info.save_summary(summary, summary_file)
408*9c5db199SXin Li
409*9c5db199SXin Li
def _delete_summaries(path):
    """Delete all directory summary files in the given directory.

    This is to cleanup the directory so no summary files are left behind to
    affect later tests.

    @param path: Path to cleanup directory summary.
    """
    # Only summary files directly under the `path` need to be cleaned;
    # summaries in sub-directories are left untouched.
    for summary_path in glob.glob(os.path.join(path, SUMMARY_FILE_PATTERN)):
        try:
            os.remove(summary_path)
        except IOError as e:
            # Best effort: log and continue with the remaining files.
            utils_lib.LOG('Failed to delete summary: %s. Error: %s' %
                          (summary_path, e))
426*9c5db199SXin Li
427*9c5db199SXin Li
def main():
    """main script. """
    _setup_logging()
    options = _parse_options()
    # -d only cleans up summary files; otherwise build (and maybe throttle)
    # the directory summary.
    if options.delete_summaries:
        _delete_summaries(options.path)
        return
    execute(options.path, options.max_size_KB)
436*9c5db199SXin Li
437*9c5db199SXin Li
438*9c5db199SXin Liif __name__ == '__main__':
439*9c5db199SXin Li    main()
440