# Copyright (C) 2010 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
from datetime import datetime

from django.utils import simplejson

import logging

from model.testfile import TestFile

JSON_RESULTS_FILE = "results.json"
JSON_RESULTS_PREFIX = "ADD_RESULTS("
JSON_RESULTS_SUFFIX = ");"

JSON_RESULTS_VERSION_KEY = "version"
JSON_RESULTS_BUILD_NUMBERS = "buildNumbers"
JSON_RESULTS_TESTS = "tests"
JSON_RESULTS_RESULTS = "results"
JSON_RESULTS_TIMES = "times"

JSON_RESULTS_VERSION = 3
JSON_RESULTS_MAX_BUILDS = 750


class JsonResults(object):
    """Merges incremental test-results JSON into aggregated JSON results.

    The on-disk/datastore format is a JSONP-style payload:
    "ADD_RESULTS(" + <json> + ");".  Aggregated results keep at most
    JSON_RESULTS_MAX_BUILDS builds per builder; per-test "results" and
    "times" values are run-length encoded as [count, value] pairs.
    """

    @classmethod
    def _strip_prefix_suffix(cls, data):
        """Strip out prefix and suffix of json results string.

        Args:
            data: json file content.

        Returns:
            json string without prefix and suffix.
        """
        assert(data.startswith(JSON_RESULTS_PREFIX))
        assert(data.endswith(JSON_RESULTS_SUFFIX))

        return data[len(JSON_RESULTS_PREFIX):
                    len(data) - len(JSON_RESULTS_SUFFIX)]

    @classmethod
    def _generate_file_data(cls, json, sort_keys=False):
        """Given json object, generate file content data by serializing it
           and adding prefix and suffix.

        Args:
            json: json object (not a string) to serialize.
            sort_keys: whether to sort keys when dumping json.

        Returns:
            json file data.
        """
        data = simplejson.dumps(json, separators=(',', ':'),
            sort_keys=sort_keys)
        return JSON_RESULTS_PREFIX + data + JSON_RESULTS_SUFFIX

    @classmethod
    def _load_json(cls, file_data):
        """Load json file to a python object.

        Args:
            file_data: json file content.

        Returns:
            json object or None on failure.
        """
        json_results_str = cls._strip_prefix_suffix(file_data)
        if not json_results_str:
            logging.warning("No json results data.")
            return None

        try:
            return simplejson.loads(json_results_str)
        # FIX: "except Exception, err" is Python-2-only syntax; "as" is
        # equivalent and also forward compatible.
        except Exception as err:
            logging.debug(json_results_str)
            logging.error("Failed to load json results: %s", str(err))
            return None

    @classmethod
    def _merge_json(cls, aggregated_json, incremental_json):
        """Merge incremental json into aggregated json results.

        Args:
            aggregated_json: aggregated json object.
            incremental_json: incremental json object.

        Returns:
            True if merge succeeds or False on failure.
        """
        # Merge non tests property data.
        # Tests properties are merged in _merge_tests.
        if not cls._merge_non_test_data(aggregated_json, incremental_json):
            return False

        # Merge tests results and times.
        incremental_tests = incremental_json[JSON_RESULTS_TESTS]
        if incremental_tests:
            aggregated_tests = aggregated_json[JSON_RESULTS_TESTS]
            cls._merge_tests(aggregated_tests, incremental_tests)

        return True

    @classmethod
    def _merge_non_test_data(cls, aggregated_json, incremental_json):
        """Merge incremental non tests property data into aggregated json
           results.

        Args:
            aggregated_json: aggregated json object.
            incremental_json: incremental json object.

        Returns:
            True if merge succeeds or False on failure.
        """
        incremental_builds = incremental_json[JSON_RESULTS_BUILD_NUMBERS]
        aggregated_builds = aggregated_json[JSON_RESULTS_BUILD_NUMBERS]
        # NOTE: if aggregated_builds is empty this raises IndexError, which
        # is caught by the try/except around _merge_json in merge().
        aggregated_build_number = int(aggregated_builds[0])
        # Loop through all incremental builds, start from the oldest run.
        for index in reversed(range(len(incremental_builds))):
            build_number = int(incremental_builds[index])
            logging.debug("Merging build %s, incremental json index: %d.",
                build_number, index)

            # Return if not all build numbers in the incremental json results
            # are newer than the most recent build in the aggregated results.
            # FIXME: make this case work.
            if build_number < aggregated_build_number:
                logging.warning(("Build %d in incremental json is older than "
                    "the most recent build in aggregated results: %d"),
                    build_number, aggregated_build_number)
                return False

            # Return if the build number is duplicated.
            # FIXME: skip the duplicated build and merge rest of the results.
            #        Need to be careful on skipping the corresponding value in
            #        _merge_tests because the property data for each test
            #        could be accumulated.
            if build_number == aggregated_build_number:
                logging.warning("Duplicate build %d in incremental json",
                    build_number)
                return False

            # Merge this build into aggregated results.
            cls._merge_one_build(aggregated_json, incremental_json, index)
            logging.debug("Merged build %s, merged json: %s.",
                build_number, aggregated_json)

        return True

    @classmethod
    def _merge_one_build(cls, aggregated_json, incremental_json,
                         incremental_index):
        """Merge one build of incremental json into aggregated json results.

        Args:
            aggregated_json: aggregated json object.
            incremental_json: incremental json object.
            incremental_index: index of the incremental json results to merge.
        """
        for key in incremental_json.keys():
            # Merge json results except "tests" properties (results, times
            # etc).  "tests" properties will be handled separately.
            if key == JSON_RESULTS_TESTS:
                continue

            if key in aggregated_json:
                # Newest build goes first; cap history at
                # JSON_RESULTS_MAX_BUILDS entries.
                aggregated_json[key].insert(
                    0, incremental_json[key][incremental_index])
                aggregated_json[key] = \
                    aggregated_json[key][:JSON_RESULTS_MAX_BUILDS]
            else:
                aggregated_json[key] = incremental_json[key]

    @classmethod
    def _merge_tests(cls, aggregated_json, incremental_json):
        """Merge "tests" properties: results, times.

        Args:
            aggregated_json: aggregated json object.
            incremental_json: incremental json object.
        """
        for test_name in incremental_json:
            incremental_test = incremental_json[test_name]
            if test_name in aggregated_json:
                aggregated_test = aggregated_json[test_name]
                cls._insert_item_run_length_encoded(
                    incremental_test[JSON_RESULTS_RESULTS],
                    aggregated_test[JSON_RESULTS_RESULTS])
                cls._insert_item_run_length_encoded(
                    incremental_test[JSON_RESULTS_TIMES],
                    aggregated_test[JSON_RESULTS_TIMES])
            else:
                aggregated_json[test_name] = incremental_test

    @classmethod
    def _insert_item_run_length_encoded(cls, incremental_item,
                                        aggregated_item):
        """Inserts the incremental run-length encoded results into the
           aggregated run-length encoded results.

        Each item is a [count, value] pair; the most recent run is at
        index 0 of the aggregated list.

        Args:
            incremental_item: incremental run-length encoded results.
            aggregated_item: aggregated run-length encoded results.
        """
        for item in incremental_item:
            if aggregated_item and item[1] == aggregated_item[0][1]:
                # Same value as the most recent run: extend its count,
                # capped at JSON_RESULTS_MAX_BUILDS.
                aggregated_item[0][0] = min(
                    aggregated_item[0][0] + item[0], JSON_RESULTS_MAX_BUILDS)
            else:
                # The test item values need to be summed from continuous runs.
                # If there is an older item (not most recent one) whose value
                # is same as the one to insert, then we should remove the old
                # item from aggregated list.
                for i in reversed(range(1, len(aggregated_item))):
                    if item[1] == aggregated_item[i][1]:
                        aggregated_item.pop(i)

                aggregated_item.insert(0, item)

    @classmethod
    def _check_json(cls, builder, json):
        """Check whether the given json is valid.

        Args:
            builder: builder name this json is for.
            json: json object to check.

        Returns:
            True if the json is valid or False otherwise.
        """
        version = json[JSON_RESULTS_VERSION_KEY]
        if version > JSON_RESULTS_VERSION:
            logging.error("Results JSON version '%s' is not supported.",
                version)
            return False

        if builder not in json:
            logging.error("Builder '%s' is not in json results.", builder)
            return False

        results_for_builder = json[builder]
        if JSON_RESULTS_BUILD_NUMBERS not in results_for_builder:
            logging.error("Missing build number in json results.")
            return False

        return True

    @classmethod
    def merge(cls, builder, aggregated, incremental, sort_keys=False):
        """Merge incremental json file data with aggregated json file data.

        Args:
            builder: builder name.
            aggregated: aggregated json file data.
            incremental: incremental json file data.
            sort_keys: whether or not to sort keys when dumping json results.

        Returns:
            Merged json file data if merge succeeds or None on failure.
        """
        if not incremental:
            logging.warning("Nothing to merge.")
            return None

        logging.info("Loading incremental json...")
        incremental_json = cls._load_json(incremental)
        if not incremental_json:
            return None

        logging.info("Checking incremental json...")
        if not cls._check_json(builder, incremental_json):
            return None

        logging.info("Loading existing aggregated json...")
        aggregated_json = cls._load_json(aggregated)
        if not aggregated_json:
            # Nothing usable to merge into; keep the incremental data as-is.
            return incremental

        logging.info("Checking existing aggregated json...")
        if not cls._check_json(builder, aggregated_json):
            return incremental

        logging.info("Merging json results...")
        try:
            if not cls._merge_json(
                    aggregated_json[builder],
                    incremental_json[builder]):
                return None
        except Exception as err:
            logging.error("Failed to merge json results: %s", str(err))
            return None

        aggregated_json[JSON_RESULTS_VERSION_KEY] = JSON_RESULTS_VERSION

        return cls._generate_file_data(aggregated_json, sort_keys)

    @classmethod
    def update(cls, builder, test_type, incremental):
        """Update datastore json file data by merging it with incremental json
           file.

        Args:
            builder: builder name.
            test_type: type of test results.
            incremental: incremental json file data to merge.

        Returns:
            TestFile object if update succeeds or None on failure.
        """
        files = TestFile.get_files(builder, test_type, JSON_RESULTS_FILE)
        if files:
            test_file = files[0]
            new_results = cls.merge(builder, test_file.data, incremental)
        else:
            # Use the incremental data if there is no aggregated file to
            # merge.
            test_file = TestFile()
            test_file.builder = builder
            # BUG FIX: test_type was never stored on a newly created file,
            # so later get_files(builder, test_type, ...) lookups could not
            # find it.  NOTE(review): assumes TestFile exposes a test_type
            # property mirroring the get_files() filter -- confirm against
            # model/testfile.py.
            test_file.test_type = test_type
            test_file.name = JSON_RESULTS_FILE
            new_results = incremental
            logging.info(
                "No existing json results, incremental json is saved.")

        if not new_results:
            return None

        if not test_file.save(new_results):
            return None

        return test_file