path: root/WebKitTools/TestResultServer/model/datastorefile.py
blob: dd4c36612a28eec8c4451c646c34eff2294b2bb7 (plain)
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from datetime import datetime
import logging

from google.appengine.ext import db

MAX_DATA_ENTRY_PER_FILE = 10
MAX_ENTRY_LEN = 1000 * 1000
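# A single App Engine datastore entity is limited to roughly 1 MB, so file
# data is chunked into segments of at most MAX_ENTRY_LEN bytes; one file can
# span at most MAX_DATA_ENTRY_PER_FILE segments (about 10 MB in total).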


class DataEntry(db.Model):
    """Datastore entry that stores one segmant of file data
       (<1000*1000 bytes).
    """

    data = db.BlobProperty()

    @classmethod
    def get(cls, key):
        return db.get(key)

    def get_data(self, key):
        return db.get(key)


class DataStoreFile(db.Model):
    """This class stores file in datastore.
       If a file is oversize (>1000*1000 bytes), the file is split into
       multiple segments and stored in multiple datastore entries.
    """

    name = db.StringProperty()
    data_keys = db.ListProperty(db.Key)
    date = db.DateTimeProperty(auto_now_add=True)

    data = None

    def delete_data(self, keys=None):
        if not keys:
            keys = self.data_keys

        for key in keys:
            data_entry = DataEntry.get(key)
            if data_entry:
                data_entry.delete()

    def save_data(self, data):
        if not data:
            logging.warning("No data to save.")
            return False

        if len(data) > (MAX_DATA_ENTRY_PER_FILE * MAX_ENTRY_LEN):
            logging.error("File too big, can't save to datastore: %dK",
                len(data) / 1024)
            return False

        start = 0
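        # Reuse this file's existing DataEntry records for the new segments;
        # any keys left over when the new data needs fewer segments are
        # deleted after the loop.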
        keys = self.data_keys
        self.data_keys = []
        while start < len(data):
            if keys:
                key = keys.pop(0)
                data_entry = DataEntry.get(key)
                if not data_entry:
                    logging.warning("Found key, but no data entry: %s", key)
                    data_entry = DataEntry()
            else:
                data_entry = DataEntry()

            data_entry.data = db.Blob(data[start: start + MAX_ENTRY_LEN])
            data_entry.put()

            logging.info("Data saved: %s.", data_entry.key())
            self.data_keys.append(data_entry.key())

            start = start + MAX_ENTRY_LEN

        if keys:
            self.delete_data(keys)

        self.data = data

        return True

    def load_data(self):
        if not self.data_keys:
            logging.warning("No data to load.")
            return None

        data = []
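        # Fetch each segment in data_keys order and concatenate them to
        # rebuild the original file contents.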
        for key in self.data_keys:
            logging.info("Loading data for key: %s.", key)
            data_entry = DataEntry.get(key)
            if not data_entry:
                logging.error("No data found for key: %s.", key)
                return None

            data.append(data_entry.data)

        self.data = "".join(data)

        return self.data
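

# Usage sketch (illustrative only, not part of the original module): how a
# caller might persist an oversized blob through DataStoreFile and read it
# back later. The file name "results.json" and the "payload" argument are
# hypothetical.
def _example_save_and_load(payload):
    datastore_file = DataStoreFile(name="results.json")
    if not datastore_file.save_data(payload):
        return None
    # save_data() only writes the DataEntry segments; the file metadata
    # (name, segment keys, date) still needs an explicit put().
    datastore_file.put()

    # Later: look the file up by name and reassemble its segments.
    saved = DataStoreFile.all().filter("name =", "results.json").get()
    return saved.load_data() if saved else None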